Mirror of https://git.proxmox.com/git/rustc (synced 2025-12-09 23:08:52 +00:00)

New upstream version 1.75.0+dfsg1
commit ed00b5ec96, parent 781aab860e

Cargo.lock (generated, 489 lines changed): file diff suppressed because it is too large.
README.md (16 changes)

@@ -11,6 +11,20 @@ standard library, and documentation.
 If you wish to _contribute_ to the compiler, you should read
 [CONTRIBUTING.md](CONTRIBUTING.md) instead.
+
+<details>
+<summary>Table of content</summary>
+
+- [Quick Start](#quick-start)
+- [Installing from Source](#installing-from-source)
+- [Building Documentation](#building-documentation)
+- [Notes](#notes)
+- [Getting Help](#getting-help)
+- [Contributing](#contributing)
+- [License](#license)
+- [Trademark](#trademark)
+
+</details>
 
 ## Quick Start
 
 Read ["Installation"] from [The Book].
@@ -116,7 +130,7 @@ See [the rustc-dev-guide for more info][sysllvm].
 #### Configure and Make
 
 This project provides a configure script and makefile (the latter of which just
-invokes `x.py`). `./configure` is the recommended way to programatically
+invokes `x.py`). `./configure` is the recommended way to programmatically
 generate a `config.toml`. `make` is not recommended (we suggest using `x.py`
 directly), but it is supported and we try not to break it unnecessarily.
 
RELEASES.md (155 changes)

@@ -1,3 +1,128 @@
+Version 1.75.0 (2023-12-28)
+===========================
+
+<a id="1.75.0-Language"></a>
+
+Language
+--------
+
+- [Stabilize `async fn` and return-position `impl Trait` in traits.](https://github.com/rust-lang/rust/pull/115822/)
+- [Allow function pointer signatures containing `&mut T` in `const` contexts.](https://github.com/rust-lang/rust/pull/116015/)
+- [Match `usize`/`isize` exhaustively with half-open ranges.](https://github.com/rust-lang/rust/pull/116692/)
+- [Guarantee that `char` has the same size and alignment as `u32`.](https://github.com/rust-lang/rust/pull/116894/)
+- [Document that the null pointer has the 0 address.](https://github.com/rust-lang/rust/pull/116988/)
+- [Allow partially moved values in `match`.](https://github.com/rust-lang/rust/pull/103208/)
+- [Add notes about non-compliant FP behavior on 32bit x86 targets.](https://github.com/rust-lang/rust/pull/113053/)
+- [Stabilize ratified RISC-V target features.](https://github.com/rust-lang/rust/pull/116485/)
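The first Language bullet is the headline feature of 1.75: `async fn` and return-position `impl Trait` are now allowed directly in trait definitions. A minimal sketch of what this unlocks on stable 1.75 (the `Fetch`/`Memory` names are made up for illustration):

```rust
// `async fn` and return-position `impl Trait` in traits, stable since Rust 1.75.
// `Fetch` and `Memory` are illustrative names, not from this release.
trait Fetch {
    // Desugars to a return-position `impl Future`; no #[async_trait] macro needed.
    async fn fetch(&self, key: &str) -> Option<String>;

    // Plain return-position `impl Trait` in a trait is also allowed now.
    fn keys(&self) -> impl Iterator<Item = String>;
}

struct Memory(Vec<(String, String)>);

impl Fetch for Memory {
    async fn fetch(&self, key: &str) -> Option<String> {
        self.0.iter().find(|(k, _)| k == key).map(|(_, v)| v.clone())
    }

    fn keys(&self) -> impl Iterator<Item = String> {
        self.0.iter().map(|(k, _)| k.clone()).collect::<Vec<_>>().into_iter()
    }
}

fn main() {
    let store = Memory(vec![("a".into(), "1".into())]);
    assert_eq!(store.keys().count(), 1);
    // Calling the async method just builds a lazy future; running it needs an executor.
    let _ = store.fetch("a");
}
```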
+
+<a id="1.75.0-Compiler"></a>
+
+Compiler
+--------
+
+- [Rework negative coherence to properly consider impls that only partly overlap.](https://github.com/rust-lang/rust/pull/112875/)
+- [Bump `COINDUCTIVE_OVERLAP_IN_COHERENCE` to deny, and warn in dependencies.](https://github.com/rust-lang/rust/pull/116493/)
+- [Consider alias bounds when computing liveness in NLL.](https://github.com/rust-lang/rust/pull/116733/)
+- [Add the V (vector) extension to the `riscv64-linux-android` target spec.](https://github.com/rust-lang/rust/pull/116618/)
+- [Automatically enable cross-crate inlining for small functions](https://github.com/rust-lang/rust/pull/116505)
+- Add several new tier 3 targets:
+  - [`csky-unknown-linux-gnuabiv2hf`](https://github.com/rust-lang/rust/pull/117049/)
+  - [`i586-unknown-netbsd`](https://github.com/rust-lang/rust/pull/117170/)
+  - [`mipsel-unknown-netbsd`](https://github.com/rust-lang/rust/pull/117356/)
+
+Refer to Rust's [platform support page][platform-support-doc]
+for more information on Rust's tiered platform support.
+
+<a id="1.75.0-Libraries"></a>
+
+Libraries
+---------
+
+- [Override `Waker::clone_from` to avoid cloning `Waker`s unnecessarily.](https://github.com/rust-lang/rust/pull/96979/)
+- [Implement `BufRead` for `VecDeque<u8>`.](https://github.com/rust-lang/rust/pull/110604/)
+- [Implement `FusedIterator` for `DecodeUtf16` when the inner iterator does.](https://github.com/rust-lang/rust/pull/110729/)
+- [Implement `Not, Bit{And,Or}{,Assign}` for IP addresses.](https://github.com/rust-lang/rust/pull/113747/)
+- [Implement `Default` for `ExitCode`.](https://github.com/rust-lang/rust/pull/114589/)
+- [Guarantee representation of None in NPO](https://github.com/rust-lang/rust/pull/115333/)
+- [Document when atomic loads are guaranteed read-only.](https://github.com/rust-lang/rust/pull/115577/)
+- [Broaden the consequences of recursive TLS initialization.](https://github.com/rust-lang/rust/pull/116172/)
+- [Windows: Support sub-millisecond sleep.](https://github.com/rust-lang/rust/pull/116461/)
+- [Fix generic bound of `str::SplitInclusive`'s `DoubleEndedIterator` impl](https://github.com/rust-lang/rust/pull/100806/)
+- [Fix exit status / wait status on non-Unix `cfg(unix)` platforms.](https://github.com/rust-lang/rust/pull/115108/)
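One of the more practical library additions above is the `BufRead` impl for `VecDeque<u8>`: an in-memory byte queue can now be handed to any code that expects buffered, line-oriented reading. A small sketch, assuming a queue that already holds two newline-terminated lines:

```rust
use std::collections::VecDeque;
use std::io::BufRead;

fn main() -> std::io::Result<()> {
    // A VecDeque<u8> can act as a buffered reader (Rust 1.75+), which is handy
    // for parsers that consume bytes as they arrive at the back of the queue.
    let mut queue: VecDeque<u8> = VecDeque::from(b"first line\nsecond line\n".to_vec());

    let mut line = String::new();
    queue.read_line(&mut line)?;
    assert_eq!(line, "first line\n");

    // The consumed bytes are removed from the front of the queue.
    let rest: Vec<u8> = queue.iter().copied().collect();
    assert_eq!(rest, b"second line\n");
    Ok(())
}
```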
+
+<a id="1.75.0-Stabilized-APIs"></a>
+
+Stabilized APIs
+---------------
+
+- [`Atomic*::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicUsize.html#method.from_ptr)
+- [`FileTimes`](https://doc.rust-lang.org/stable/std/fs/struct.FileTimes.html)
+- [`FileTimesExt`](https://doc.rust-lang.org/stable/std/os/windows/fs/trait.FileTimesExt.html)
+- [`File::set_modified`](https://doc.rust-lang.org/stable/std/fs/struct.File.html#method.set_modified)
+- [`File::set_times`](https://doc.rust-lang.org/stable/std/fs/struct.File.html#method.set_times)
+- [`IpAddr::to_canonical`](https://doc.rust-lang.org/stable/core/net/enum.IpAddr.html#method.to_canonical)
+- [`Ipv6Addr::to_canonical`](https://doc.rust-lang.org/stable/core/net/struct.Ipv6Addr.html#method.to_canonical)
+- [`Option::as_slice`](https://doc.rust-lang.org/stable/core/option/enum.Option.html#method.as_slice)
+- [`Option::as_mut_slice`](https://doc.rust-lang.org/stable/core/option/enum.Option.html#method.as_mut_slice)
+- [`pointer::byte_add`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.byte_add)
+- [`pointer::byte_offset`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.byte_offset)
+- [`pointer::byte_offset_from`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.byte_offset_from)
+- [`pointer::byte_sub`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.byte_sub)
+- [`pointer::wrapping_byte_add`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.wrapping_byte_add)
+- [`pointer::wrapping_byte_offset`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.wrapping_byte_offset)
+- [`pointer::wrapping_byte_sub`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.wrapping_byte_sub)
+
+These APIs are now stable in const contexts:
+
+- [`Ipv6Addr::to_ipv4_mapped`](https://doc.rust-lang.org/stable/core/net/struct.Ipv6Addr.html#method.to_ipv4_mapped)
+- [`MaybeUninit::assume_init_read`](https://doc.rust-lang.org/stable/core/mem/union.MaybeUninit.html#method.assume_init_read)
+- [`MaybeUninit::zeroed`](https://doc.rust-lang.org/stable/core/mem/union.MaybeUninit.html#method.zeroed)
+- [`mem::discriminant`](https://doc.rust-lang.org/stable/core/mem/fn.discriminant.html)
+- [`mem::zeroed`](https://doc.rust-lang.org/stable/core/mem/fn.zeroed.html)
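To give a flavor of the newly stabilized APIs, here is a short sketch using `Option::as_slice` and `pointer::byte_add`; the values are arbitrary and only the API calls are the point:

```rust
fn main() {
    // Option::as_slice: view an Option<T> as a 0- or 1-element slice (Rust 1.75+),
    // so option values can feed straight into slice/iterator code.
    let some = Some(7);
    let none: Option<i32> = None;
    assert_eq!(some.as_slice(), &[7]);
    assert_eq!(none.as_slice(), &[] as &[i32]);

    // pointer::byte_add: offset a pointer by bytes instead of elements (unsafe;
    // the caller must keep the result inside the same allocation).
    let values = [0x1111_2222u32, 0x3333_4444];
    let p = values.as_ptr();
    let second = unsafe { p.byte_add(std::mem::size_of::<u32>()) };
    assert_eq!(unsafe { *second }, 0x3333_4444);
}
```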
+
+<a id="1.75.0-Cargo"></a>
+
+Cargo
+-----
+
+- [Add new packages to `[workspace.members]` automatically.](https://github.com/rust-lang/cargo/pull/12779/)
+- [Allow version-less `Cargo.toml` manifests.](https://github.com/rust-lang/cargo/pull/12786/)
+- [Make browser links out of HTML file paths.](https://github.com/rust-lang/cargo/pull/12889)
+
+<a id="1.75.0-Rustdoc"></a>
+
+Rustdoc
+-------
+
+- [Accept less invalid Rust in rustdoc.](https://github.com/rust-lang/rust/pull/117450/)
+- [Document lack of object safety on affected traits.](https://github.com/rust-lang/rust/pull/113241/)
+- [Hide `#[repr(transparent)]` if it isn't part of the public ABI.](https://github.com/rust-lang/rust/pull/115439/)
+- [Show enum discriminant if it is a C-like variant.](https://github.com/rust-lang/rust/pull/116142/)
+
+<a id="1.75.0-Compatibility-Notes"></a>
+
+Compatibility Notes
+-------------------
+
+- [FreeBSD targets now require at least version 12.](https://github.com/rust-lang/rust/pull/114521/)
+- [Formally demote tier 2 MIPS targets to tier 3.](https://github.com/rust-lang/rust/pull/115238/)
+- [Make misalignment a hard error in `const` contexts.](https://github.com/rust-lang/rust/pull/115524/)
+- [Fix detecting references to packed unsized fields.](https://github.com/rust-lang/rust/pull/115583/)
+- [Remove support for compiler plugins.](https://github.com/rust-lang/rust/pull/116412/)
+
+<a id="1.75.0-Internal-Changes"></a>
+
+Internal Changes
+----------------
+
+These changes do not affect any public interfaces of Rust, but they represent
+significant improvements to the performance or internals of rustc and related
+tools.
+
+- [Optimize `librustc_driver.so` with BOLT.](https://github.com/rust-lang/rust/pull/116352/)
+- [Enable parallel rustc front end in dev and nightly builds.](https://github.com/rust-lang/rust/pull/117435/)
+- [Distribute `rustc-codegen-cranelift` as rustup component on the nightly channel.](https://github.com/rust-lang/rust/pull/81746/)
+
 Version 1.74.1 (2023-12-07)
 ===========================
 
@@ -14,11 +139,12 @@ Language
 --------
 
 - [Codify that `std::mem::Discriminant<T>` does not depend on any lifetimes in T](https://github.com/rust-lang/rust/pull/104299/)
-- [Replace `private_in_public` lint with `private_interfaces` and `private_bounds` per RFC 2145](https://github.com/rust-lang/rust/pull/113126/)
+- [Replace `private_in_public` lint with `private_interfaces` and `private_bounds` per RFC 2145.](https://github.com/rust-lang/rust/pull/113126/)
   Read more in [RFC 2145](https://rust-lang.github.io/rfcs/2145-type-privacy.html).
 - [Allow explicit `#[repr(Rust)]`](https://github.com/rust-lang/rust/pull/114201/)
 - [closure field capturing: don't depend on alignment of packed fields](https://github.com/rust-lang/rust/pull/115315/)
 - [Enable MIR-based drop-tracking for `async` blocks](https://github.com/rust-lang/rust/pull/107421/)
+- [Stabilize `impl_trait_projections`](https://github.com/rust-lang/rust/pull/115659)
 
 <a id="1.74.0-Compiler"></a>
 
@@ -53,8 +179,8 @@ Stabilized APIs
 - [`core::num::Saturating`](https://doc.rust-lang.org/stable/std/num/struct.Saturating.html)
 - [`impl From<io::Stdout> for std::process::Stdio`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStdout%3E-for-Stdio)
 - [`impl From<io::Stderr> for std::process::Stdio`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStderr%3E-for-Stdio)
-- [`impl From<OwnedHandle> for std::process::Child{Stdin, Stdout, Stderr}`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStderr%3E-for-Stdio)
-- [`impl From<OwnedFd> for std::process::Child{Stdin, Stdout, Stderr}`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStderr%3E-for-Stdio)
+- [`impl From<OwnedHandle> for std::process::Child{Stdin, Stdout, Stderr}`](https://doc.rust-lang.org/stable/std/process/struct.ChildStderr.html#impl-From%3COwnedHandle%3E-for-ChildStderr)
+- [`impl From<OwnedFd> for std::process::Child{Stdin, Stdout, Stderr}`](https://doc.rust-lang.org/stable/std/process/struct.ChildStderr.html#impl-From%3COwnedFd%3E-for-ChildStderr)
 - [`std::ffi::OsString::from_encoded_bytes_unchecked`](https://doc.rust-lang.org/stable/std/ffi/struct.OsString.html#method.from_encoded_bytes_unchecked)
 - [`std::ffi::OsString::into_encoded_bytes`](https://doc.rust-lang.org/stable/std/ffi/struct.OsString.html#method.into_encoded_bytes)
 - [`std::ffi::OsStr::from_encoded_bytes_unchecked`](https://doc.rust-lang.org/stable/std/ffi/struct.OsStr.html#method.from_encoded_bytes_unchecked)
@@ -77,17 +203,17 @@ These APIs are now stable in const contexts:
 Cargo
 -----
 
-- [fix: Set MSRV for internal packages](https://github.com/rust-lang/cargo/pull/12381/)
-- [config: merge lists in precedence order](https://github.com/rust-lang/cargo/pull/12515/)
-- [fix(update): Clarify meaning of --aggressive as --recursive](https://github.com/rust-lang/cargo/pull/12544/)
-- [fix(update): Make `-p` more convenient by being positional](https://github.com/rust-lang/cargo/pull/12545/)
-- [feat(help): Add styling to help output ](https://github.com/rust-lang/cargo/pull/12578/)
-- [feat(pkgid): Allow incomplete versions when unambigious](https://github.com/rust-lang/cargo/pull/12614/)
-- [feat: stabilize credential-process and registry-auth](https://github.com/rust-lang/cargo/pull/12649/)
-- [feat(cli): Add '-n' to dry-run](https://github.com/rust-lang/cargo/pull/12660/)
+- [In `Cargo.toml`, stabilize `[lints]`](https://github.com/rust-lang/cargo/pull/12648/)
+- [Stabilize credential-process and registry-auth](https://github.com/rust-lang/cargo/pull/12649/)
+- [Stabilize `--keep-going` build flag](https://github.com/rust-lang/cargo/pull/12568/)
+- [Add styling to `--help` output](https://github.com/rust-lang/cargo/pull/12578/)
+- [For `cargo clean`, add `--dry-run` flag and summary line at the end](https://github.com/rust-lang/cargo/pull/12638)
+- [For `cargo update`, make `--package` more convenient by being positional](https://github.com/rust-lang/cargo/pull/12545/)
+- [For `cargo update`, clarify meaning of --aggressive as --recursive](https://github.com/rust-lang/cargo/pull/12544/)
+- [Add '-n' as an alias for `--dry-run`](https://github.com/rust-lang/cargo/pull/12660/)
+- [Allow version-prefixes in pkgid's (e.g. `--package` flags) to resolve ambiguities](https://github.com/rust-lang/cargo/pull/12614/)
+- [In `.cargo/config.toml`, merge lists in precedence order](https://github.com/rust-lang/cargo/pull/12515/)
 - [Add support for `target.'cfg(..)'.linker`](https://github.com/rust-lang/cargo/pull/12535/)
-- [Stabilize `--keep-going`](https://github.com/rust-lang/cargo/pull/12568/)
-- [feat: Stabilize lints](https://github.com/rust-lang/cargo/pull/12648/)
 
 <a id="1.74.0-Rustdoc"></a>
 
@@ -95,7 +221,6 @@ Rustdoc
 -------
 
 - [Add warning block support in rustdoc](https://github.com/rust-lang/rust/pull/106561/)
-- [Accept additional user-defined syntax classes in fenced code blocks](https://github.com/rust-lang/rust/pull/110800/)
 - [rustdoc-search: add support for type parameters](https://github.com/rust-lang/rust/pull/112725/)
 - [rustdoc: show inner enum and struct in type definition for concrete type](https://github.com/rust-lang/rust/pull/114855/)
 
@@ -108,6 +233,7 @@ Compatibility Notes
 - [make Cell::swap panic if the Cells partially overlap](https://github.com/rust-lang/rust/pull/114795/)
 - [Reject invalid crate names in `--extern`](https://github.com/rust-lang/rust/pull/116001/)
 - [Don't resolve generic impls that may be shadowed by dyn built-in impls](https://github.com/rust-lang/rust/pull/114941/)
+- [The new `impl From<{&,&mut} [T; N]> for Vec<T>` is known to cause some inference failures with overly-generic code.](https://github.com/rust-lang/rust/issues/117054) In those examples using the `tui` crate, the combination of `AsRef<_>` and `Into<Vec>` leaves the middle type ambiguous, and the new `impl` adds another possibility, so it now requires an explicit type annotation.
 
 <a id="1.74.0-Internal-Changes"></a>
 
@@ -205,7 +331,6 @@ These APIs are now stable in const contexts:
 Cargo
 -----
 
-- [Encode URL params correctly for `SourceId` in `Cargo.lock`.](https://github.com/rust-lang/cargo/pull/12280/)
 - [Bail out an error when using `cargo::` in custom build script.](https://github.com/rust-lang/cargo/pull/12332/)
 
 <a id="1.73.0-Misc"></a>
compiler/rustc/Cargo.toml

@@ -4,16 +4,21 @@ version = "0.0.0"
 edition = "2021"
 
 [dependencies]
-rustc_driver = { path = "../rustc_driver" }
-rustc_driver_impl = { path = "../rustc_driver_impl" }
+# tidy-alphabetical-start
 
 # Make sure rustc_codegen_ssa ends up in the sysroot, because this
 # crate is intended to be used by codegen backends, which may not be in-tree.
 rustc_codegen_ssa = { path = "../rustc_codegen_ssa" }
 
+rustc_driver = { path = "../rustc_driver" }
+rustc_driver_impl = { path = "../rustc_driver_impl" }
+
 # Make sure rustc_smir ends up in the sysroot, because this
-# crate is intended to be used by stable MIR consumers, which are not in-tree
+# crate is intended to be used by stable MIR consumers, which are not in-tree.
 rustc_smir = { path = "../rustc_smir" }
 
 stable_mir = { path = "../stable_mir" }
+# tidy-alphabetical-end
 
 [dependencies.jemalloc-sys]
 version = "0.5.0"
@@ -21,7 +26,9 @@ optional = true
 features = ['unprefixed_malloc_on_supported_platforms']
 
 [features]
+# tidy-alphabetical-start
 jemalloc = ['jemalloc-sys']
 llvm = ['rustc_driver_impl/llvm']
 max_level_info = ['rustc_driver_impl/max_level_info']
 rustc_use_parallel_compiler = ['rustc_driver_impl/rustc_use_parallel_compiler']
+# tidy-alphabetical-end
compiler/rustc_abi/Cargo.toml

@@ -4,21 +4,27 @@ version = "0.0.0"
 edition = "2021"
 
 [dependencies]
+# tidy-alphabetical-start
 bitflags = "1.2.1"
-tracing = "0.1"
 rand = { version = "0.8.4", default-features = false, optional = true }
 rand_xoshiro = { version = "0.6.0", optional = true }
 rustc_data_structures = { path = "../rustc_data_structures", optional = true }
 rustc_index = { path = "../rustc_index", default-features = false }
 rustc_macros = { path = "../rustc_macros", optional = true }
 rustc_serialize = { path = "../rustc_serialize", optional = true }
+tracing = "0.1"
+# tidy-alphabetical-end
 
 [features]
+# tidy-alphabetical-start
 default = ["nightly", "randomize"]
-randomize = ["rand", "rand_xoshiro"]
+# rust-analyzer depends on this crate and we therefore require it to built on a stable toolchain
+# without depending on rustc_data_structures, rustc_macros and rustc_serialize
 nightly = [
     "rustc_data_structures",
     "rustc_index/nightly",
     "rustc_macros",
     "rustc_serialize",
 ]
+randomize = ["rand", "rand_xoshiro", "nightly"]
+# tidy-alphabetical-end
compiler/rustc_abi/src/layout.rs

@@ -1,21 +1,27 @@
-use super::*;
-use std::fmt::Write;
+use std::fmt::{self, Write};
+use std::ops::Deref;
 use std::{borrow::Borrow, cmp, iter, ops::Bound};
 
-#[cfg(feature = "randomize")]
-use rand::{seq::SliceRandom, SeedableRng};
-#[cfg(feature = "randomize")]
-use rand_xoshiro::Xoshiro128StarStar;
-
+use rustc_index::Idx;
 use tracing::debug;
 
+use crate::{
+    Abi, AbiAndPrefAlign, Align, FieldsShape, IndexSlice, IndexVec, Integer, LayoutS, Niche,
+    NonZeroUsize, Primitive, ReprOptions, Scalar, Size, StructKind, TagEncoding, TargetDataLayout,
+    Variants, WrappingRange,
+};
+
 pub trait LayoutCalculator {
     type TargetDataLayoutRef: Borrow<TargetDataLayout>;
 
     fn delay_bug(&self, txt: String);
     fn current_data_layout(&self) -> Self::TargetDataLayoutRef;
 
-    fn scalar_pair(&self, a: Scalar, b: Scalar) -> LayoutS {
+    fn scalar_pair<FieldIdx: Idx, VariantIdx: Idx>(
+        &self,
+        a: Scalar,
+        b: Scalar,
+    ) -> LayoutS<FieldIdx, VariantIdx> {
         let dl = self.current_data_layout();
         let dl = dl.borrow();
         let b_align = b.align(dl);
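The pattern repeated throughout this file is that `LayoutS` and the layout helpers stop being hard-wired to the crate's own index newtypes (`FieldIdx`, `FIRST_VARIANT`) and instead accept any index type implementing `rustc_index::Idx`, which fits the Cargo.toml note above about rust-analyzer consuming this crate on a stable toolchain. The following is a self-contained sketch of that genericization pattern with simplified stand-in types, not the real rustc_abi definitions:

```rust
// Simplified stand-ins for rustc_index::Idx and a layout-like struct; only the
// genericization pattern is the point, not the real rustc_abi API.
trait Idx: Copy {
    fn new(idx: usize) -> Self;
    fn index(self) -> usize;
}

// A concrete index newtype, in the spirit of FieldIdx/VariantIdx in rustc.
#[derive(Copy, Clone, Debug, PartialEq)]
struct FieldIdx(u32);

impl Idx for FieldIdx {
    fn new(idx: usize) -> Self { FieldIdx(idx as u32) }
    fn index(self) -> usize { self.0 as usize }
}

// Before: a struct hard-wired to one index type.
// After: generic over the index type, so any `Idx` implementor works.
struct LayoutS<F: Idx> {
    field_offsets: Vec<(F, u64)>,
}

fn first_field<F: Idx>(layout: &LayoutS<F>) -> F {
    // `F::new(0)` replaces a hard-coded constant like FIRST_VARIANT.
    layout.field_offsets.first().map(|(f, _)| *f).unwrap_or(F::new(0))
}

fn main() {
    let layout = LayoutS { field_offsets: vec![(FieldIdx::new(0), 0), (FieldIdx::new(1), 4)] };
    assert_eq!(first_field(&layout).index(), 0);
}
```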
@@ -31,7 +37,7 @@ pub trait LayoutCalculator {
             .max_by_key(|niche| niche.available(dl));
 
         LayoutS {
-            variants: Variants::Single { index: FIRST_VARIANT },
+            variants: Variants::Single { index: VariantIdx::new(0) },
             fields: FieldsShape::Arbitrary {
                 offsets: [Size::ZERO, b_offset].into(),
                 memory_index: [0, 1].into(),
@@ -45,40 +51,45 @@ pub trait LayoutCalculator {
         }
     }
 
-    fn univariant(
+    fn univariant<
+        'a,
+        FieldIdx: Idx,
+        VariantIdx: Idx,
+        F: Deref<Target = &'a LayoutS<FieldIdx, VariantIdx>> + fmt::Debug,
+    >(
         &self,
         dl: &TargetDataLayout,
-        fields: &IndexSlice<FieldIdx, Layout<'_>>,
+        fields: &IndexSlice<FieldIdx, F>,
         repr: &ReprOptions,
         kind: StructKind,
-    ) -> Option<LayoutS> {
+    ) -> Option<LayoutS<FieldIdx, VariantIdx>> {
         let layout = univariant(self, dl, fields, repr, kind, NicheBias::Start);
-        // Enums prefer niches close to the beginning or the end of the variants so that other (smaller)
-        // data-carrying variants can be packed into the space after/before the niche.
+        // Enums prefer niches close to the beginning or the end of the variants so that other
+        // (smaller) data-carrying variants can be packed into the space after/before the niche.
         // If the default field ordering does not give us a niche at the front then we do a second
-        // run and bias niches to the right and then check which one is closer to one of the struct's
-        // edges.
+        // run and bias niches to the right and then check which one is closer to one of the
+        // struct's edges.
         if let Some(layout) = &layout {
             // Don't try to calculate an end-biased layout for unsizable structs,
             // otherwise we could end up with different layouts for
-            // Foo<Type> and Foo<dyn Trait> which would break unsizing
+            // Foo<Type> and Foo<dyn Trait> which would break unsizing.
             if !matches!(kind, StructKind::MaybeUnsized) {
                 if let Some(niche) = layout.largest_niche {
                     let head_space = niche.offset.bytes();
-                    let niche_length = niche.value.size(dl).bytes();
-                    let tail_space = layout.size.bytes() - head_space - niche_length;
+                    let niche_len = niche.value.size(dl).bytes();
+                    let tail_space = layout.size.bytes() - head_space - niche_len;
 
-                    // This may end up doing redundant work if the niche is already in the last field
-                    // (e.g. a trailing bool) and there is tail padding. But it's non-trivial to get
-                    // the unpadded size so we try anyway.
+                    // This may end up doing redundant work if the niche is already in the last
+                    // field (e.g. a trailing bool) and there is tail padding. But it's non-trivial
+                    // to get the unpadded size so we try anyway.
                     if fields.len() > 1 && head_space != 0 && tail_space > 0 {
                         let alt_layout = univariant(self, dl, fields, repr, kind, NicheBias::End)
                             .expect("alt layout should always work");
-                        let niche = alt_layout
+                        let alt_niche = alt_layout
                             .largest_niche
                             .expect("alt layout should have a niche like the regular one");
-                        let alt_head_space = niche.offset.bytes();
-                        let alt_niche_len = niche.value.size(dl).bytes();
+                        let alt_head_space = alt_niche.offset.bytes();
+                        let alt_niche_len = alt_niche.value.size(dl).bytes();
                         let alt_tail_space =
                             alt_layout.size.bytes() - alt_head_space - alt_niche_len;
 
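The niche bookkeeping being reshuffled here is what lets an enum hide its discriminant inside a field's forbidden bit patterns instead of adding a tag. The effect is easy to observe with `size_of`; the reference only guarantees a few of these cases, so the illustrative struct below is printed rather than asserted:

```rust
use std::mem::size_of;

// Stand-in type with a niche: bool only uses the values 0 and 1, so Option
// can encode its None case in one of the spare bit patterns.
#[allow(dead_code)]
struct Flagged {
    id: u16,
    flag: bool,
    tag: u8,
}

fn main() {
    // Guaranteed niche cases: references are non-null, so Option<&T> costs
    // nothing extra, and Option<bool> still fits in one byte.
    assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
    assert_eq!(size_of::<Option<bool>>(), 1);

    // Default (repr(Rust)) layout may also reorder fields and reuse the bool's
    // niche here, so Option<Flagged> is typically no bigger than Flagged.
    println!("Flagged: {}", size_of::<Flagged>());
    println!("Option<Flagged>: {}", size_of::<Option<Flagged>>());
}
```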
@@ -93,7 +104,7 @@ pub trait LayoutCalculator {
                             alt_layout: {}\n",
                             layout.size.bytes(),
                             head_space,
-                            niche_length,
+                            niche_len,
                             tail_space,
                             alt_head_space,
                             alt_niche_len,
@@ -114,11 +125,13 @@ pub trait LayoutCalculator {
         layout
     }
 
-    fn layout_of_never_type(&self) -> LayoutS {
+    fn layout_of_never_type<FieldIdx: Idx, VariantIdx: Idx>(
+        &self,
+    ) -> LayoutS<FieldIdx, VariantIdx> {
         let dl = self.current_data_layout();
         let dl = dl.borrow();
         LayoutS {
-            variants: Variants::Single { index: FIRST_VARIANT },
+            variants: Variants::Single { index: VariantIdx::new(0) },
             fields: FieldsShape::Primitive,
             abi: Abi::Uninhabited,
             largest_niche: None,
@@ -129,10 +142,15 @@ pub trait LayoutCalculator {
         }
     }
 
-    fn layout_of_struct_or_enum(
+    fn layout_of_struct_or_enum<
+        'a,
+        FieldIdx: Idx,
+        VariantIdx: Idx,
+        F: Deref<Target = &'a LayoutS<FieldIdx, VariantIdx>> + fmt::Debug,
+    >(
         &self,
         repr: &ReprOptions,
-        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, Layout<'_>>>,
+        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
         is_enum: bool,
         is_unsafe_cell: bool,
         scalar_valid_range: (Bound<u128>, Bound<u128>),
@@ -140,7 +158,7 @@ pub trait LayoutCalculator {
         discriminants: impl Iterator<Item = (VariantIdx, i128)>,
         dont_niche_optimize_enum: bool,
         always_sized: bool,
-    ) -> Option<LayoutS> {
+    ) -> Option<LayoutS<FieldIdx, VariantIdx>> {
         let dl = self.current_data_layout();
         let dl = dl.borrow();
 
@@ -155,11 +173,11 @@ pub trait LayoutCalculator {
         // but *not* an encoding of the discriminant (e.g., a tag value).
         // See issue #49298 for more details on the need to leave space
         // for non-ZST uninhabited data (mostly partial initialization).
-        let absent = |fields: &IndexSlice<FieldIdx, Layout<'_>>| {
-            let uninhabited = fields.iter().any(|f| f.abi().is_uninhabited());
+        let absent = |fields: &IndexSlice<FieldIdx, F>| {
+            let uninhabited = fields.iter().any(|f| f.abi.is_uninhabited());
             // We cannot ignore alignment; that might lead us to entirely discard a variant and
             // produce an enum that is less aligned than it should be!
-            let is_1zst = fields.iter().all(|f| f.0.is_1zst());
+            let is_1zst = fields.iter().all(|f| f.is_1zst());
             uninhabited && is_1zst
         };
         let (present_first, present_second) = {
@@ -176,7 +194,7 @@ pub trait LayoutCalculator {
             }
             // If it's a struct, still compute a layout so that we can still compute the
             // field offsets.
-            None => FIRST_VARIANT,
+            None => VariantIdx::new(0),
         };
 
         let is_struct = !is_enum ||
@@ -279,12 +297,12 @@ pub trait LayoutCalculator {
         // variant layouts, so we can't store them in the
         // overall LayoutS. Store the overall LayoutS
         // and the variant LayoutSs here until then.
-        struct TmpLayout {
-            layout: LayoutS,
-            variants: IndexVec<VariantIdx, LayoutS>,
+        struct TmpLayout<FieldIdx: Idx, VariantIdx: Idx> {
+            layout: LayoutS<FieldIdx, VariantIdx>,
+            variants: IndexVec<VariantIdx, LayoutS<FieldIdx, VariantIdx>>,
         }
 
-        let calculate_niche_filling_layout = || -> Option<TmpLayout> {
+        let calculate_niche_filling_layout = || -> Option<TmpLayout<FieldIdx, VariantIdx>> {
             if dont_niche_optimize_enum {
                 return None;
             }
@@ -322,13 +340,14 @@ pub trait LayoutCalculator {
             let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap()
                 ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap();
 
-            let count = niche_variants.size_hint().1.unwrap() as u128;
+            let count =
+                (niche_variants.end().index() as u128 - niche_variants.start().index() as u128) + 1;
 
             // Find the field with the largest niche
             let (field_index, niche, (niche_start, niche_scalar)) = variants[largest_variant_index]
                 .iter()
                 .enumerate()
-                .filter_map(|(j, field)| Some((j, field.largest_niche()?)))
+                .filter_map(|(j, field)| Some((j, field.largest_niche?)))
                 .max_by_key(|(_, niche)| niche.available(dl))
                 .and_then(|(j, niche)| Some((j, niche, niche.reserve(dl, count)?)))?;
             let niche_offset =
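The `count` rewrite above replaces the iterator's `size_hint` with arithmetic on the range endpoints, presumably because a `RangeInclusive` over an arbitrary `Idx` type is no longer an iterator; the two formulations agree, as this tiny check with plain `usize` indices standing in for variant indices shows:

```rust
fn main() {
    // An inclusive range of "variant indices", as in calculate_niche_filling_layout.
    let niche_variants = 3usize..=7;

    // Old formulation: upper bound of the iterator's size_hint.
    let via_size_hint = niche_variants.size_hint().1.unwrap() as u128;

    // New formulation: arithmetic on the range endpoints.
    let via_indices = (*niche_variants.end() as u128 - *niche_variants.start() as u128) + 1;

    assert_eq!(via_size_hint, 5);
    assert_eq!(via_indices, via_size_hint);
}
```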
@@ -443,7 +462,7 @@ pub trait LayoutCalculator {
             let discr_type = repr.discr_type();
             let bits = Integer::from_attr(dl, discr_type).size().bits();
             for (i, mut val) in discriminants {
-                if variants[i].iter().any(|f| f.abi().is_uninhabited()) {
+                if variants[i].iter().any(|f| f.abi.is_uninhabited()) {
                     continue;
                 }
                 if discr_type.is_signed() {
@@ -484,7 +503,7 @@ pub trait LayoutCalculator {
             if repr.c() {
                 for fields in variants {
                     for field in fields {
-                        prefix_align = prefix_align.max(field.align().abi);
+                        prefix_align = prefix_align.max(field.align.abi);
                     }
                 }
             }
@@ -503,9 +522,9 @@ pub trait LayoutCalculator {
                     // Find the first field we can't move later
                     // to make room for a larger discriminant.
                     for field_idx in st.fields.index_by_increasing_offset() {
-                        let field = &field_layouts[FieldIdx::from_usize(field_idx)];
-                        if !field.0.is_1zst() {
-                            start_align = start_align.min(field.align().abi);
+                        let field = &field_layouts[FieldIdx::new(field_idx)];
+                        if !field.is_1zst() {
+                            start_align = start_align.min(field.align.abi);
                             break;
                         }
                     }
@@ -520,6 +539,7 @@ pub trait LayoutCalculator {
             // Align the maximum variant size to the largest alignment.
             size = size.align_to(align.abi);
 
+            // FIXME(oli-obk): deduplicate and harden these checks
             if size.bytes() >= dl.obj_size_bound() {
                 return None;
             }
@@ -587,7 +607,7 @@ pub trait LayoutCalculator {
 
             let tag_mask = ity.size().unsigned_int_max();
             let tag = Scalar::Initialized {
-                value: Int(ity, signed),
+                value: Primitive::Int(ity, signed),
                 valid_range: WrappingRange {
                     start: (min as u128 & tag_mask),
                     end: (max as u128 & tag_mask),
@@ -612,7 +632,7 @@ pub trait LayoutCalculator {
             };
             // We skip *all* ZST here and later check if we are good in terms of alignment.
             // This lets us handle some cases involving aligned ZST.
-            let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.0.is_zst());
+            let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst());
             let (field, offset) = match (fields.next(), fields.next()) {
                 (None, None) => {
                     common_prim_initialized_in_all_variants = false;
@@ -624,7 +644,7 @@ pub trait LayoutCalculator {
                     break;
                 }
             };
-            let prim = match field.abi() {
+            let prim = match field.abi {
                 Abi::Scalar(scalar) => {
                     common_prim_initialized_in_all_variants &=
                         matches!(scalar, Scalar::Initialized { .. });
@@ -655,7 +675,7 @@ pub trait LayoutCalculator {
                 // Common prim might be uninit.
                 Scalar::Union { value: prim }
             };
-            let pair = self.scalar_pair(tag, prim_scalar);
+            let pair = self.scalar_pair::<FieldIdx, VariantIdx>(tag, prim_scalar);
             let pair_offsets = match pair.fields {
                 FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                     assert_eq!(memory_index.raw, [0, 1]);
@@ -663,8 +683,8 @@ pub trait LayoutCalculator {
                 }
                 _ => panic!(),
             };
-            if pair_offsets[FieldIdx::from_u32(0)] == Size::ZERO
-                && pair_offsets[FieldIdx::from_u32(1)] == *offset
+            if pair_offsets[FieldIdx::new(0)] == Size::ZERO
+                && pair_offsets[FieldIdx::new(1)] == *offset
                 && align == pair.align
                 && size == pair.size
             {
@@ -684,7 +704,8 @@ pub trait LayoutCalculator {
             // Also do not overwrite any already existing "clever" ABIs.
             if variant.fields.count() > 0 && matches!(variant.abi, Abi::Aggregate { .. }) {
                 variant.abi = abi;
-                // Also need to bump up the size and alignment, so that the entire value fits in here.
+                // Also need to bump up the size and alignment, so that the entire value fits
+                // in here.
                 variant.size = cmp::max(variant.size, size);
                 variant.align.abi = cmp::max(variant.align.abi, align.abi);
             }
@@ -720,8 +741,9 @@ pub trait LayoutCalculator {
                 // pick the layout with the larger niche; otherwise,
                 // pick tagged as it has simpler codegen.
                 use cmp::Ordering::*;
-                let niche_size =
-                    |tmp_l: &TmpLayout| tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl));
+                let niche_size = |tmp_l: &TmpLayout<FieldIdx, VariantIdx>| {
+                    tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl))
+                };
                 match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) {
                     (Greater, _) => nl,
                     (Equal, Less) => nl,
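This is the spot where the calculator picks between the tagged layout and the niche-filling layout by comparing sizes and niche availability. The outcome is visible from ordinary code with `size_of`; `Option<NonZeroU32>` is a guaranteed case of the niche-filling (null-pointer-style) optimization, while the exact size of `Option<u32>` is just the usual result on current compilers, so it is printed rather than asserted:

```rust
use std::mem::size_of;
use std::num::NonZeroU32;

fn main() {
    // NonZeroU32 forbids 0, so Option<NonZeroU32> uses the niche-filling layout:
    // None is encoded as the forbidden 0 and no separate tag is needed.
    assert_eq!(size_of::<Option<NonZeroU32>>(), 4);

    // Plain u32 has no forbidden values, so Option<u32> falls back to a tagged
    // layout; with alignment padding this usually comes out to 8 bytes.
    println!("Option<u32>: {} bytes", size_of::<Option<u32>>());
}
```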
@@ -741,11 +763,16 @@ pub trait LayoutCalculator {
         Some(best_layout.layout)
     }
 
-    fn layout_of_union(
+    fn layout_of_union<
+        'a,
+        FieldIdx: Idx,
+        VariantIdx: Idx,
+        F: Deref<Target = &'a LayoutS<FieldIdx, VariantIdx>> + fmt::Debug,
+    >(
         &self,
         repr: &ReprOptions,
-        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, Layout<'_>>>,
-    ) -> Option<LayoutS> {
+        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
+    ) -> Option<LayoutS<FieldIdx, VariantIdx>> {
         let dl = self.current_data_layout();
         let dl = dl.borrow();
         let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };
@@ -762,24 +789,24 @@ pub trait LayoutCalculator {
         };
 
         let mut size = Size::ZERO;
-        let only_variant = &variants[FIRST_VARIANT];
+        let only_variant = &variants[VariantIdx::new(0)];
         for field in only_variant {
-            if field.0.is_unsized() {
+            if field.is_unsized() {
                 self.delay_bug("unsized field in union".to_string());
             }
 
-            align = align.max(field.align());
-            max_repr_align = max_repr_align.max(field.max_repr_align());
-            size = cmp::max(size, field.size());
+            align = align.max(field.align);
+            max_repr_align = max_repr_align.max(field.max_repr_align);
+            size = cmp::max(size, field.size);
 
-            if field.0.is_zst() {
+            if field.is_zst() {
                 // Nothing more to do for ZST fields
                 continue;
             }
 
             if let Ok(common) = common_non_zst_abi_and_align {
                 // Discard valid range information and allow undef
-                let field_abi = field.abi().to_union();
+                let field_abi = field.abi.to_union();
 
                 if let Some((common_abi, common_align)) = common {
                     if common_abi != field_abi {
@@ -790,15 +817,14 @@ pub trait LayoutCalculator {
                         // have the same alignment
                         if !matches!(common_abi, Abi::Aggregate { .. }) {
                             assert_eq!(
-                                common_align,
-                                field.align().abi,
+                                common_align, field.align.abi,
                                 "non-Aggregate field with matching ABI but differing alignment"
                             );
                         }
                     }
                 } else {
                     // First non-ZST field: record its ABI and alignment
-                    common_non_zst_abi_and_align = Ok(Some((field_abi, field.align().abi)));
+                    common_non_zst_abi_and_align = Ok(Some((field_abi, field.align.abi)));
                 }
             }
         }
@@ -830,7 +856,7 @@ pub trait LayoutCalculator {
         };
 
         Some(LayoutS {
-            variants: Variants::Single { index: FIRST_VARIANT },
+            variants: Variants::Single { index: VariantIdx::new(0) },
             fields: FieldsShape::Union(NonZeroUsize::new(only_variant.len())?),
             abi,
             largest_niche: None,
@ -848,14 +874,19 @@ enum NicheBias {
|
|||||||
End,
|
End,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn univariant(
|
fn univariant<
|
||||||
|
'a,
|
||||||
|
FieldIdx: Idx,
|
||||||
|
VariantIdx: Idx,
|
||||||
|
F: Deref<Target = &'a LayoutS<FieldIdx, VariantIdx>> + fmt::Debug,
|
||||||
|
>(
|
||||||
this: &(impl LayoutCalculator + ?Sized),
|
this: &(impl LayoutCalculator + ?Sized),
|
||||||
dl: &TargetDataLayout,
|
dl: &TargetDataLayout,
|
||||||
fields: &IndexSlice<FieldIdx, Layout<'_>>,
|
fields: &IndexSlice<FieldIdx, F>,
|
||||||
repr: &ReprOptions,
|
repr: &ReprOptions,
|
||||||
kind: StructKind,
|
kind: StructKind,
|
||||||
niche_bias: NicheBias,
|
niche_bias: NicheBias,
|
||||||
) -> Option<LayoutS> {
|
) -> Option<LayoutS<FieldIdx, VariantIdx>> {
|
||||||
let pack = repr.pack;
|
let pack = repr.pack;
|
||||||
let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
|
let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
|
||||||
let mut max_repr_align = repr.align;
|
let mut max_repr_align = repr.align;
|
||||||
@ -868,15 +899,17 @@ fn univariant(
|
|||||||
|
|
||||||
// If `-Z randomize-layout` was enabled for the type definition we can shuffle
|
// If `-Z randomize-layout` was enabled for the type definition we can shuffle
|
||||||
// the field ordering to try and catch some code making assumptions about layouts
|
// the field ordering to try and catch some code making assumptions about layouts
|
||||||
// we don't guarantee
|
// we don't guarantee.
|
||||||
if repr.can_randomize_type_layout() && cfg!(feature = "randomize") {
|
if repr.can_randomize_type_layout() && cfg!(feature = "randomize") {
|
||||||
#[cfg(feature = "randomize")]
|
#[cfg(feature = "randomize")]
|
||||||
{
|
{
|
||||||
// `ReprOptions.layout_seed` is a deterministic seed that we can use to
|
use rand::{seq::SliceRandom, SeedableRng};
|
||||||
// randomize field ordering with
|
// `ReprOptions.layout_seed` is a deterministic seed we can use to randomize field
|
||||||
let mut rng = Xoshiro128StarStar::seed_from_u64(repr.field_shuffle_seed.as_u64());
|
// ordering.
|
||||||
|
let mut rng =
|
||||||
|
rand_xoshiro::Xoshiro128StarStar::seed_from_u64(repr.field_shuffle_seed);
|
||||||
|
|
||||||
// Shuffle the ordering of the fields
|
// Shuffle the ordering of the fields.
|
||||||
optimizing.shuffle(&mut rng);
|
optimizing.shuffle(&mut rng);
|
||||||
}
|
}
|
||||||
// Otherwise we just leave things alone and actually optimize the type's fields
|
// Otherwise we just leave things alone and actually optimize the type's fields
|
||||||
@ -884,35 +917,34 @@ fn univariant(
|
|||||||
// To allow unsizing `&Foo<Type>` -> `&Foo<dyn Trait>`, the layout of the struct must
|
// To allow unsizing `&Foo<Type>` -> `&Foo<dyn Trait>`, the layout of the struct must
|
||||||
// not depend on the layout of the tail.
|
// not depend on the layout of the tail.
|
||||||
let max_field_align =
|
let max_field_align =
|
||||||
fields_excluding_tail.iter().map(|f| f.align().abi.bytes()).max().unwrap_or(1);
|
fields_excluding_tail.iter().map(|f| f.align.abi.bytes()).max().unwrap_or(1);
|
||||||
let largest_niche_size = fields_excluding_tail
|
let largest_niche_size = fields_excluding_tail
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|f| f.largest_niche())
|
.filter_map(|f| f.largest_niche)
|
||||||
.map(|n| n.available(dl))
|
.map(|n| n.available(dl))
|
||||||
.max()
|
.max()
|
||||||
.unwrap_or(0);
|
.unwrap_or(0);
|
||||||
|
|
||||||
// Calculates a sort key to group fields by their alignment or possibly some size-derived
|
// Calculates a sort key to group fields by their alignment or possibly some
|
||||||
// pseudo-alignment.
|
// size-derived pseudo-alignment.
|
||||||
let alignment_group_key = |layout: Layout<'_>| {
|
let alignment_group_key = |layout: &F| {
|
||||||
if let Some(pack) = pack {
|
if let Some(pack) = pack {
|
||||||
// return the packed alignment in bytes
|
// Return the packed alignment in bytes.
|
||||||
layout.align().abi.min(pack).bytes()
|
layout.align.abi.min(pack).bytes()
|
||||||
} else {
|
} else {
|
||||||
// returns log2(effective-align).
|
// Returns `log2(effective-align)`. This is ok since `pack` applies to all
|
||||||
// This is ok since `pack` applies to all fields equally.
|
// fields equally. The calculation assumes that size is an integer multiple of
|
||||||
// The calculation assumes that size is an integer multiple of align, except for ZSTs.
|
// align, except for ZSTs.
|
||||||
//
|
let align = layout.align.abi.bytes();
|
||||||
let align = layout.align().abi.bytes();
|
let size = layout.size.bytes();
|
||||||
let size = layout.size().bytes();
|
let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0);
|
||||||
let niche_size = layout.largest_niche().map(|n| n.available(dl)).unwrap_or(0);
|
// Group [u8; 4] with align-4 or [u8; 6] with align-2 fields.
|
||||||
// group [u8; 4] with align-4 or [u8; 6] with align-2 fields
|
|
||||||
let size_as_align = align.max(size).trailing_zeros();
|
let size_as_align = align.max(size).trailing_zeros();
|
||||||
let size_as_align = if largest_niche_size > 0 {
|
let size_as_align = if largest_niche_size > 0 {
|
||||||
match niche_bias {
|
match niche_bias {
|
||||||
// Given `A(u8, [u8; 16])` and `B(bool, [u8; 16])` we want to bump the array
|
// Given `A(u8, [u8; 16])` and `B(bool, [u8; 16])` we want to bump the
|
||||||
// to the front in the first case (for aligned loads) but keep the bool in front
|
// array to the front in the first case (for aligned loads) but keep
|
||||||
// in the second case for its niches.
|
// the bool in front in the second case for its niches.
|
||||||
NicheBias::Start => max_field_align.trailing_zeros().min(size_as_align),
|
NicheBias::Start => max_field_align.trailing_zeros().min(size_as_align),
|
||||||
// When moving niches towards the end of the struct then for
|
// When moving niches towards the end of the struct then for
|
||||||
// A((u8, u8, u8, bool), (u8, bool, u8)) we want to keep the first tuple
|
// A((u8, u8, u8, bool), (u8, bool, u8)) we want to keep the first tuple
|
||||||
@@ -931,18 +963,18 @@ fn univariant(

        match kind {
            StructKind::AlwaysSized | StructKind::MaybeUnsized => {
                // Currently `LayoutS` only exposes a single niche so sorting is usually
                // sufficient to get one niche into the preferred position. If it ever
                // supported multiple niches then a more advanced pick-and-pack approach could
                // provide better results. But even for the single-niche cache it's not
                // optimal. E.g. for A(u32, (bool, u8), u16) it would be possible to move the
                // bool to the front but it would require packing the tuple together with the
                // u16 to build a 4-byte group so that the u32 can be placed after it without
                // padding. This kind of packing can't be achieved by sorting.
                optimizing.sort_by_key(|&x| {
-                   let f = fields[x];
-                   let field_size = f.size().bytes();
-                   let niche_size = f.largest_niche().map_or(0, |n| n.available(dl));
+                   let f = &fields[x];
+                   let field_size = f.size.bytes();
+                   let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
                    let niche_size_key = match niche_bias {
                        // large niche first
                        NicheBias::Start => !niche_size,
@@ -950,8 +982,8 @@ fn univariant(
                        NicheBias::End => niche_size,
                    };
                    let inner_niche_offset_key = match niche_bias {
-                       NicheBias::Start => f.largest_niche().map_or(0, |n| n.offset.bytes()),
-                       NicheBias::End => f.largest_niche().map_or(0, |n| {
+                       NicheBias::Start => f.largest_niche.map_or(0, |n| n.offset.bytes()),
+                       NicheBias::End => f.largest_niche.map_or(0, |n| {
                            !(field_size - n.value.size(dl).bytes() - n.offset.bytes())
                        }),
                    };
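The sort above reorders field *indices* by a composite key rather than moving field data around. A minimal standalone sketch of that pattern, with a made-up `FieldInfo` type standing in for rustc's layout structures:

```rust
#[derive(Debug)]
struct FieldInfo {
    align: u64,
    niche_size: u64,
}

fn main() {
    let fields = vec![
        FieldInfo { align: 4, niche_size: 0 },
        FieldInfo { align: 1, niche_size: 254 }, // e.g. a bool
        FieldInfo { align: 2, niche_size: 0 },
    ];

    // Sort the permutation, not the fields: descending alignment first, then
    // larger niches first (bitwise negation turns "largest" into "smallest key").
    let mut order: Vec<usize> = (0..fields.len()).collect();
    order.sort_by_key(|&i| (std::cmp::Reverse(fields[i].align), !fields[i].niche_size));

    println!("memory order: {order:?}");
}
```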
@@ -975,8 +1007,8 @@ fn univariant(
                // And put the largest niche in an alignment group at the end
                // so it can be used as discriminant in jagged enums
                optimizing.sort_by_key(|&x| {
-                   let f = fields[x];
-                   let niche_size = f.largest_niche().map_or(0, |n| n.available(dl));
+                   let f = &fields[x];
+                   let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
                    (alignment_group_key(f), niche_size)
                });
            }
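Keeping the largest niche where the enum layout code can reuse it as a tag is what makes niche-filling enums cost nothing. The effect is observable from stable Rust through documented guarantees about `Option` around types with forbidden values (this example is illustrative, not part of the diff):

```rust
use std::mem::size_of;
use std::num::NonZeroU32;

fn main() {
    // The null value of `&u8` and the zero value of `NonZeroU32` are invalid,
    // so `Option` encodes `None` in that niche instead of adding a tag.
    assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
    assert_eq!(size_of::<Option<NonZeroU32>>(), size_of::<NonZeroU32>());
    println!("niche-filling keeps both Options tag-free");
}
```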
@@ -1012,24 +1044,24 @@ fn univariant(
            ));
        }

-       if field.0.is_unsized() {
+       if field.is_unsized() {
            sized = false;
        }

        // Invariant: offset < dl.obj_size_bound() <= 1<<61
        let field_align = if let Some(pack) = pack {
-           field.align().min(AbiAndPrefAlign::new(pack))
+           field.align.min(AbiAndPrefAlign::new(pack))
        } else {
-           field.align()
+           field.align
        };
        offset = offset.align_to(field_align.abi);
        align = align.max(field_align);
-       max_repr_align = max_repr_align.max(field.max_repr_align());
+       max_repr_align = max_repr_align.max(field.max_repr_align);

        debug!("univariant offset: {:?} field: {:#?}", offset, field);
        offsets[i] = offset;

-       if let Some(mut niche) = field.largest_niche() {
+       if let Some(mut niche) = field.largest_niche {
            let available = niche.available(dl);
            // Pick up larger niches.
            let prefer_new_niche = match niche_bias {
@@ -1044,7 +1076,7 @@ fn univariant(
            }
        }

-       offset = offset.checked_add(field.size(), dl)?;
+       offset = offset.checked_add(field.size, dl)?;
    }

    // The unadjusted ABI alignment does not include repr(align), but does include repr(pack).
@@ -1068,16 +1100,20 @@ fn univariant(
        inverse_memory_index.invert_bijective_mapping()
    } else {
        debug_assert!(inverse_memory_index.iter().copied().eq(fields.indices()));
-       inverse_memory_index.into_iter().map(FieldIdx::as_u32).collect()
+       inverse_memory_index.into_iter().map(|it| it.index() as u32).collect()
    };
    let size = min_size.align_to(align.abi);
+   // FIXME(oli-obk): deduplicate and harden these checks
+   if size.bytes() >= dl.obj_size_bound() {
+       return None;
+   }
    let mut layout_of_single_non_zst_field = None;
    let mut abi = Abi::Aggregate { sized };
    // Try to make this a Scalar/ScalarPair.
    if sized && size.bytes() > 0 {
        // We skip *all* ZST here and later check if we are good in terms of alignment.
        // This lets us handle some cases involving aligned ZST.
-       let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.0.is_zst());
+       let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.is_zst());

        match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
            // We have exactly one non-ZST field.
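The field loop above is the core of univariant layout: round the running offset up to each field's alignment, record the offset, track the maximum alignment, and finally round the total size up to that alignment. A freestanding sketch of just that arithmetic (no niches, packing, or `repr` handling), with hypothetical helper names:

```rust
/// Round `offset` up to the next multiple of `align` (a power of two).
fn align_to(offset: u64, align: u64) -> u64 {
    (offset + align - 1) & !(align - 1)
}

/// Returns (field offsets, total size, alignment) for (size, align) pairs
/// laid out in the given order.
fn univariant_sketch(fields: &[(u64, u64)]) -> (Vec<u64>, u64, u64) {
    let mut offset = 0;
    let mut align = 1;
    let mut offsets = Vec::with_capacity(fields.len());
    for &(size, field_align) in fields {
        offset = align_to(offset, field_align); // pad up to the field's alignment
        offsets.push(offset);
        offset += size;
        align = align.max(field_align);
    }
    (offsets, align_to(offset, align), align)
}

fn main() {
    // (size, align) for u8, u32, u16 in declaration order.
    let (offsets, size, align) = univariant_sketch(&[(1, 1), (4, 4), (2, 2)]);
    assert_eq!((offsets, size, align), (vec![0, 4, 8], 12, 4)); // 3 bytes of padding
}
```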
@ -1085,18 +1121,17 @@ fn univariant(
|
|||||||
layout_of_single_non_zst_field = Some(field);
|
layout_of_single_non_zst_field = Some(field);
|
||||||
|
|
||||||
// Field fills the struct and it has a scalar or scalar pair ABI.
|
// Field fills the struct and it has a scalar or scalar pair ABI.
|
||||||
if offsets[i].bytes() == 0 && align.abi == field.align().abi && size == field.size()
|
if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size {
|
||||||
{
|
match field.abi {
|
||||||
match field.abi() {
|
|
||||||
// For plain scalars, or vectors of them, we can't unpack
|
// For plain scalars, or vectors of them, we can't unpack
|
||||||
// newtypes for `#[repr(C)]`, as that affects C ABIs.
|
// newtypes for `#[repr(C)]`, as that affects C ABIs.
|
||||||
Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
|
Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
|
||||||
abi = field.abi();
|
abi = field.abi;
|
||||||
}
|
}
|
||||||
// But scalar pairs are Rust-specific and get
|
// But scalar pairs are Rust-specific and get
|
||||||
// treated as aggregates by C ABIs anyway.
|
// treated as aggregates by C ABIs anyway.
|
||||||
Abi::ScalarPair(..) => {
|
Abi::ScalarPair(..) => {
|
||||||
abi = field.abi();
|
abi = field.abi;
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
@ -1105,7 +1140,7 @@ fn univariant(
|
|||||||
|
|
||||||
// Two non-ZST fields, and they're both scalars.
|
// Two non-ZST fields, and they're both scalars.
|
||||||
(Some((i, a)), Some((j, b)), None) => {
|
(Some((i, a)), Some((j, b)), None) => {
|
||||||
match (a.abi(), b.abi()) {
|
match (a.abi, b.abi) {
|
||||||
(Abi::Scalar(a), Abi::Scalar(b)) => {
|
(Abi::Scalar(a), Abi::Scalar(b)) => {
|
||||||
// Order by the memory placement, not source order.
|
// Order by the memory placement, not source order.
|
||||||
let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
|
let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
|
||||||
@ -1113,7 +1148,7 @@ fn univariant(
|
|||||||
} else {
|
} else {
|
||||||
((j, b), (i, a))
|
((j, b), (i, a))
|
||||||
};
|
};
|
||||||
let pair = this.scalar_pair(a, b);
|
let pair = this.scalar_pair::<FieldIdx, VariantIdx>(a, b);
|
||||||
let pair_offsets = match pair.fields {
|
let pair_offsets = match pair.fields {
|
||||||
FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
|
FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
|
||||||
assert_eq!(memory_index.raw, [0, 1]);
|
assert_eq!(memory_index.raw, [0, 1]);
|
||||||
@ -1121,8 +1156,8 @@ fn univariant(
|
|||||||
}
|
}
|
||||||
_ => panic!(),
|
_ => panic!(),
|
||||||
};
|
};
|
||||||
if offsets[i] == pair_offsets[FieldIdx::from_usize(0)]
|
if offsets[i] == pair_offsets[FieldIdx::new(0)]
|
||||||
&& offsets[j] == pair_offsets[FieldIdx::from_usize(1)]
|
&& offsets[j] == pair_offsets[FieldIdx::new(1)]
|
||||||
&& align == pair.align
|
&& align == pair.align
|
||||||
&& size == pair.size
|
&& size == pair.size
|
||||||
{
|
{
|
||||||
@ -1138,13 +1173,13 @@ fn univariant(
|
|||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if fields.iter().any(|f| f.abi().is_uninhabited()) {
|
if fields.iter().any(|f| f.abi.is_uninhabited()) {
|
||||||
abi = Abi::Uninhabited;
|
abi = Abi::Uninhabited;
|
||||||
}
|
}
|
||||||
|
|
||||||
let unadjusted_abi_align = if repr.transparent() {
|
let unadjusted_abi_align = if repr.transparent() {
|
||||||
match layout_of_single_non_zst_field {
|
match layout_of_single_non_zst_field {
|
||||||
Some(l) => l.unadjusted_abi_align(),
|
Some(l) => l.unadjusted_abi_align,
|
||||||
None => {
|
None => {
|
||||||
// `repr(transparent)` with all ZST fields.
|
// `repr(transparent)` with all ZST fields.
|
||||||
align.abi
|
align.abi
|
||||||
@ -1155,7 +1190,7 @@ fn univariant(
|
|||||||
};
|
};
|
||||||
|
|
||||||
Some(LayoutS {
|
Some(LayoutS {
|
||||||
variants: Variants::Single { index: FIRST_VARIANT },
|
variants: Variants::Single { index: VariantIdx::new(0) },
|
||||||
fields: FieldsShape::Arbitrary { offsets, memory_index },
|
fields: FieldsShape::Arbitrary { offsets, memory_index },
|
||||||
abi,
|
abi,
|
||||||
largest_niche,
|
largest_niche,
|
||||||
@ -1166,17 +1201,22 @@ fn univariant(
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn format_field_niches(
|
fn format_field_niches<
|
||||||
layout: &LayoutS,
|
'a,
|
||||||
fields: &IndexSlice<FieldIdx, Layout<'_>>,
|
FieldIdx: Idx,
|
||||||
|
VariantIdx: Idx,
|
||||||
|
F: Deref<Target = &'a LayoutS<FieldIdx, VariantIdx>> + fmt::Debug,
|
||||||
|
>(
|
||||||
|
layout: &LayoutS<FieldIdx, VariantIdx>,
|
||||||
|
fields: &IndexSlice<FieldIdx, F>,
|
||||||
dl: &TargetDataLayout,
|
dl: &TargetDataLayout,
|
||||||
) -> String {
|
) -> String {
|
||||||
let mut s = String::new();
|
let mut s = String::new();
|
||||||
for i in layout.fields.index_by_increasing_offset() {
|
for i in layout.fields.index_by_increasing_offset() {
|
||||||
let offset = layout.fields.offset(i);
|
let offset = layout.fields.offset(i);
|
||||||
let f = fields[i.into()];
|
let f = &fields[FieldIdx::new(i)];
|
||||||
write!(s, "[o{}a{}s{}", offset.bytes(), f.align().abi.bytes(), f.size().bytes()).unwrap();
|
write!(s, "[o{}a{}s{}", offset.bytes(), f.align.abi.bytes(), f.size.bytes()).unwrap();
|
||||||
if let Some(n) = f.largest_niche() {
|
if let Some(n) = f.largest_niche {
|
||||||
write!(
|
write!(
|
||||||
s,
|
s,
|
||||||
" n{}b{}s{}",
|
" n{}b{}s{}",
|
||||||
|
|||||||
@ -1,23 +1,24 @@
|
|||||||
#![cfg_attr(feature = "nightly", feature(step_trait, rustc_attrs, min_specialization))]
|
#![cfg_attr(feature = "nightly", feature(step_trait))]
|
||||||
#![cfg_attr(feature = "nightly", allow(internal_features))]
|
#![cfg_attr(feature = "nightly", allow(internal_features))]
|
||||||
|
#![cfg_attr(all(not(bootstrap), feature = "nightly"), doc(rust_logo))]
|
||||||
|
#![cfg_attr(all(not(bootstrap), feature = "nightly"), feature(rustdoc_internals))]
|
||||||
|
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
#[cfg(feature = "nightly")]
|
|
||||||
use std::iter::Step;
|
|
||||||
use std::num::{NonZeroUsize, ParseIntError};
|
use std::num::{NonZeroUsize, ParseIntError};
|
||||||
use std::ops::{Add, AddAssign, Mul, RangeInclusive, Sub};
|
use std::ops::{Add, AddAssign, Mul, RangeInclusive, Sub};
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
use bitflags::bitflags;
|
use bitflags::bitflags;
|
||||||
use rustc_data_structures::intern::Interned;
|
use rustc_index::{Idx, IndexSlice, IndexVec};
|
||||||
use rustc_data_structures::stable_hasher::Hash64;
|
|
||||||
#[cfg(feature = "nightly")]
|
#[cfg(feature = "nightly")]
|
||||||
use rustc_data_structures::stable_hasher::StableOrd;
|
use rustc_data_structures::stable_hasher::StableOrd;
|
||||||
use rustc_index::{IndexSlice, IndexVec};
|
|
||||||
#[cfg(feature = "nightly")]
|
#[cfg(feature = "nightly")]
|
||||||
use rustc_macros::HashStable_Generic;
|
use rustc_macros::HashStable_Generic;
|
||||||
#[cfg(feature = "nightly")]
|
#[cfg(feature = "nightly")]
|
||||||
use rustc_macros::{Decodable, Encodable};
|
use rustc_macros::{Decodable, Encodable};
|
||||||
|
#[cfg(feature = "nightly")]
|
||||||
|
use std::iter::Step;
|
||||||
|
|
||||||
mod layout;
|
mod layout;
|
||||||
|
|
||||||
@ -28,9 +29,6 @@ pub use layout::LayoutCalculator;
|
|||||||
/// instead of implementing everything in `rustc_middle`.
|
/// instead of implementing everything in `rustc_middle`.
|
||||||
pub trait HashStableContext {}
|
pub trait HashStableContext {}
|
||||||
|
|
||||||
use Integer::*;
|
|
||||||
use Primitive::*;
|
|
||||||
|
|
||||||
bitflags! {
|
bitflags! {
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
|
#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
|
||||||
@@ -53,10 +51,11 @@ bitflags! {
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
pub enum IntegerType {
-   /// Pointer sized integer type, i.e. isize and usize. The field shows signedness, that
-   /// is, `Pointer(true)` is isize.
+   /// Pointer-sized integer type, i.e. `isize` and `usize`. The field shows signedness, e.g.
+   /// `Pointer(true)` means `isize`.
    Pointer(bool),
-   /// Fix sized integer type, e.g. i8, u32, i128 The bool field shows signedness, `Fixed(I8, false)` means `u8`
+   /// Fixed-sized integer type, e.g. `i8`, `u32`, `i128`. The bool field shows signedness, e.g.
+   /// `Fixed(I8, false)` means `u8`.
    Fixed(Integer, bool),
}

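A small sketch of how such a discriminated integer description maps back onto concrete byte widths. The enums below only mirror the shape of `IntegerType`/`Integer` as stand-ins, and the pointer width is assumed to be the host's:

```rust
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum Int { I8, I16, I32, I64, I128 }

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum IntTy {
    /// `isize`/`usize`; the bool is the signedness.
    Pointer(bool),
    /// A fixed-width integer such as `u8` or `i128`.
    Fixed(Int, bool),
}

fn size_in_bytes(ty: IntTy) -> usize {
    match ty {
        // Assumption: the target pointer width equals the host's.
        IntTy::Pointer(_) => std::mem::size_of::<usize>(),
        IntTy::Fixed(i, _) => match i {
            Int::I8 => 1,
            Int::I16 => 2,
            Int::I32 => 4,
            Int::I64 => 8,
            Int::I128 => 16,
        },
    }
}

fn main() {
    assert_eq!(size_in_bytes(IntTy::Fixed(Int::I8, false)), 1); // `Fixed(I8, false)` is `u8`
    println!("usize here is {} bytes", size_in_bytes(IntTy::Pointer(false)));
}
```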
@ -69,7 +68,7 @@ impl IntegerType {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Represents the repr options provided by the user,
|
/// Represents the repr options provided by the user.
|
||||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
|
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
|
||||||
#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
|
#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
|
||||||
pub struct ReprOptions {
|
pub struct ReprOptions {
|
||||||
@@ -79,12 +78,12 @@ pub struct ReprOptions {
    pub flags: ReprFlags,
    /// The seed to be used for randomizing a type's layout
    ///
-   /// Note: This could technically be a `Hash128` which would
+   /// Note: This could technically be a `u128` which would
    /// be the "most accurate" hash as it'd encompass the item and crate
    /// hash without loss, but it does pay the price of being larger.
    /// Everything's a tradeoff, a 64-bit seed should be sufficient for our
    /// purposes (primarily `-Z randomize-layout`)
-   pub field_shuffle_seed: Hash64,
+   pub field_shuffle_seed: u64,
}

impl ReprOptions {
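A seed of this kind only has to make the per-type permutation reproducible. The sketch below shows seed-driven shuffling of field indices in the abstract; the xorshift mixer and Fisher-Yates loop are illustrative choices, not the algorithm rustc uses for `-Z randomize-layout`:

```rust
/// Tiny xorshift-style generator, just enough to drive a deterministic shuffle.
struct Rng(u64);

impl Rng {
    fn next(&mut self) -> u64 {
        self.0 ^= self.0 << 13;
        self.0 ^= self.0 >> 7;
        self.0 ^= self.0 << 17;
        self.0
    }
}

/// Fisher-Yates shuffle of field indices, fully determined by `seed`.
fn shuffle_fields(field_count: usize, seed: u64) -> Vec<usize> {
    let mut order: Vec<usize> = (0..field_count).collect();
    let mut rng = Rng(seed | 1); // avoid the all-zero state
    for i in (1..order.len()).rev() {
        let j = (rng.next() % (i as u64 + 1)) as usize;
        order.swap(i, j);
    }
    order
}

fn main() {
    // Same seed, same layout; a different seed usually gives a different one.
    assert_eq!(shuffle_fields(5, 42), shuffle_fields(5, 42));
    println!("{:?} vs {:?}", shuffle_fields(5, 42), shuffle_fields(5, 43));
}
```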
@ -139,7 +138,7 @@ impl ReprOptions {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Returns `true` if this type is valid for reordering and `-Z randomize-layout`
|
/// Returns `true` if this type is valid for reordering and `-Z randomize-layout`
|
||||||
/// was enabled for its declaration crate
|
/// was enabled for its declaration crate.
|
||||||
pub fn can_randomize_type_layout(&self) -> bool {
|
pub fn can_randomize_type_layout(&self) -> bool {
|
||||||
!self.inhibit_struct_field_reordering_opt()
|
!self.inhibit_struct_field_reordering_opt()
|
||||||
&& self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
|
&& self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
|
||||||
@ -217,7 +216,8 @@ pub enum TargetDataLayoutErrors<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl TargetDataLayout {
|
impl TargetDataLayout {
|
||||||
/// Parse data layout from an [llvm data layout string](https://llvm.org/docs/LangRef.html#data-layout)
|
/// Parse data layout from an
|
||||||
|
/// [llvm data layout string](https://llvm.org/docs/LangRef.html#data-layout)
|
||||||
///
|
///
|
||||||
/// This function doesn't fill `c_enum_min_size` and it will always be `I32` since it can not be
|
/// This function doesn't fill `c_enum_min_size` and it will always be `I32` since it can not be
|
||||||
/// determined from llvm string.
|
/// determined from llvm string.
|
||||||
@@ -242,10 +242,11 @@ impl TargetDataLayout {
        };

        // Parse a size string.
-       let size = |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);
+       let parse_size =
+           |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);

        // Parse an alignment string.
-       let align = |s: &[&'a str], cause: &'a str| {
+       let parse_align = |s: &[&'a str], cause: &'a str| {
            if s.is_empty() {
                return Err(TargetDataLayoutErrors::MissingAlignment { cause });
            }
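The renamed `parse_size`/`parse_align` closures pick bit counts out of entries of the LLVM data-layout string, e.g. `p:64:64`. A rough standalone sketch of that kind of parsing, with error handling and most spec kinds omitted:

```rust
/// Parse one `p:<size>:<abi-align>[:...]` entry from an LLVM-style
/// data-layout string into (pointer_size_bits, pointer_abi_align_bits).
fn parse_pointer_spec(spec: &str) -> Option<(u64, u64)> {
    let mut parts = spec.split(':');
    if parts.next()? != "p" {
        return None;
    }
    let size = parts.next()?.parse().ok()?;
    let align = parts.next()?.parse().ok()?;
    Some((size, align))
}

fn main() {
    // A fragment of a typical x86_64 data-layout string.
    let layout = "e-m:e-p:64:64-i64:64-f80:128-n8:16:32:64-S128";
    let pointer = layout.split('-').find_map(parse_pointer_spec);
    assert_eq!(pointer, Some((64, 64)));
}
```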
@ -269,22 +270,22 @@ impl TargetDataLayout {
|
|||||||
[p] if p.starts_with('P') => {
|
[p] if p.starts_with('P') => {
|
||||||
dl.instruction_address_space = parse_address_space(&p[1..], "P")?
|
dl.instruction_address_space = parse_address_space(&p[1..], "P")?
|
||||||
}
|
}
|
||||||
["a", ref a @ ..] => dl.aggregate_align = align(a, "a")?,
|
["a", ref a @ ..] => dl.aggregate_align = parse_align(a, "a")?,
|
||||||
["f32", ref a @ ..] => dl.f32_align = align(a, "f32")?,
|
["f32", ref a @ ..] => dl.f32_align = parse_align(a, "f32")?,
|
||||||
["f64", ref a @ ..] => dl.f64_align = align(a, "f64")?,
|
["f64", ref a @ ..] => dl.f64_align = parse_align(a, "f64")?,
|
||||||
// FIXME(erikdesjardins): we should be parsing nonzero address spaces
|
// FIXME(erikdesjardins): we should be parsing nonzero address spaces
|
||||||
// this will require replacing TargetDataLayout::{pointer_size,pointer_align}
|
// this will require replacing TargetDataLayout::{pointer_size,pointer_align}
|
||||||
// with e.g. `fn pointer_size_in(AddressSpace)`
|
// with e.g. `fn pointer_size_in(AddressSpace)`
|
||||||
[p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => {
|
[p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => {
|
||||||
dl.pointer_size = size(s, p)?;
|
dl.pointer_size = parse_size(s, p)?;
|
||||||
dl.pointer_align = align(a, p)?;
|
dl.pointer_align = parse_align(a, p)?;
|
||||||
}
|
}
|
||||||
[s, ref a @ ..] if s.starts_with('i') => {
|
[s, ref a @ ..] if s.starts_with('i') => {
|
||||||
let Ok(bits) = s[1..].parse::<u64>() else {
|
let Ok(bits) = s[1..].parse::<u64>() else {
|
||||||
size(&s[1..], "i")?; // For the user error.
|
parse_size(&s[1..], "i")?; // For the user error.
|
||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
let a = align(a, s)?;
|
let a = parse_align(a, s)?;
|
||||||
match bits {
|
match bits {
|
||||||
1 => dl.i1_align = a,
|
1 => dl.i1_align = a,
|
||||||
8 => dl.i8_align = a,
|
8 => dl.i8_align = a,
|
||||||
@ -301,8 +302,8 @@ impl TargetDataLayout {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
[s, ref a @ ..] if s.starts_with('v') => {
|
[s, ref a @ ..] if s.starts_with('v') => {
|
||||||
let v_size = size(&s[1..], "v")?;
|
let v_size = parse_size(&s[1..], "v")?;
|
||||||
let a = align(a, s)?;
|
let a = parse_align(a, s)?;
|
||||||
if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
|
if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
|
||||||
v.1 = a;
|
v.1 = a;
|
||||||
continue;
|
continue;
|
||||||
@ -339,6 +340,7 @@ impl TargetDataLayout {
|
|||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn ptr_sized_integer(&self) -> Integer {
|
pub fn ptr_sized_integer(&self) -> Integer {
|
||||||
|
use Integer::*;
|
||||||
match self.pointer_size.bits() {
|
match self.pointer_size.bits() {
|
||||||
16 => I16,
|
16 => I16,
|
||||||
32 => I32,
|
32 => I32,
|
||||||
@ -680,6 +682,7 @@ impl fmt::Display for AlignFromBytesError {
|
|||||||
|
|
||||||
impl Align {
|
impl Align {
|
||||||
pub const ONE: Align = Align { pow2: 0 };
|
pub const ONE: Align = Align { pow2: 0 };
|
||||||
|
// LLVM has a maximal supported alignment of 2^29, we inherit that.
|
||||||
pub const MAX: Align = Align { pow2: 29 };
|
pub const MAX: Align = Align { pow2: 29 };
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
@ -747,7 +750,6 @@ impl Align {
|
|||||||
/// A pair of alignments, ABI-mandated and preferred.
|
/// A pair of alignments, ABI-mandated and preferred.
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||||
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
||||||
|
|
||||||
pub struct AbiAndPrefAlign {
|
pub struct AbiAndPrefAlign {
|
||||||
pub abi: Align,
|
pub abi: Align,
|
||||||
pub pref: Align,
|
pub pref: Align,
|
||||||
@ -773,7 +775,6 @@ impl AbiAndPrefAlign {
|
|||||||
/// Integers, also used for enum discriminants.
|
/// Integers, also used for enum discriminants.
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
|
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
|
||||||
#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
|
#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
|
||||||
|
|
||||||
pub enum Integer {
|
pub enum Integer {
|
||||||
I8,
|
I8,
|
||||||
I16,
|
I16,
|
||||||
@ -785,6 +786,7 @@ pub enum Integer {
|
|||||||
impl Integer {
|
impl Integer {
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn size(self) -> Size {
|
pub fn size(self) -> Size {
|
||||||
|
use Integer::*;
|
||||||
match self {
|
match self {
|
||||||
I8 => Size::from_bytes(1),
|
I8 => Size::from_bytes(1),
|
||||||
I16 => Size::from_bytes(2),
|
I16 => Size::from_bytes(2),
|
||||||
@ -805,6 +807,7 @@ impl Integer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
|
pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
|
||||||
|
use Integer::*;
|
||||||
let dl = cx.data_layout();
|
let dl = cx.data_layout();
|
||||||
|
|
||||||
match self {
|
match self {
|
||||||
@ -819,6 +822,7 @@ impl Integer {
|
|||||||
/// Returns the largest signed value that can be represented by this Integer.
|
/// Returns the largest signed value that can be represented by this Integer.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn signed_max(self) -> i128 {
|
pub fn signed_max(self) -> i128 {
|
||||||
|
use Integer::*;
|
||||||
match self {
|
match self {
|
||||||
I8 => i8::MAX as i128,
|
I8 => i8::MAX as i128,
|
||||||
I16 => i16::MAX as i128,
|
I16 => i16::MAX as i128,
|
||||||
@ -831,6 +835,7 @@ impl Integer {
|
|||||||
/// Finds the smallest Integer type which can represent the signed value.
|
/// Finds the smallest Integer type which can represent the signed value.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn fit_signed(x: i128) -> Integer {
|
pub fn fit_signed(x: i128) -> Integer {
|
||||||
|
use Integer::*;
|
||||||
match x {
|
match x {
|
||||||
-0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
|
-0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
|
||||||
-0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
|
-0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
|
||||||
@ -843,6 +848,7 @@ impl Integer {
|
|||||||
/// Finds the smallest Integer type which can represent the unsigned value.
|
/// Finds the smallest Integer type which can represent the unsigned value.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn fit_unsigned(x: u128) -> Integer {
|
pub fn fit_unsigned(x: u128) -> Integer {
|
||||||
|
use Integer::*;
|
||||||
match x {
|
match x {
|
||||||
0..=0x0000_0000_0000_00ff => I8,
|
0..=0x0000_0000_0000_00ff => I8,
|
||||||
0..=0x0000_0000_0000_ffff => I16,
|
0..=0x0000_0000_0000_ffff => I16,
|
||||||
@ -854,6 +860,7 @@ impl Integer {
|
|||||||
|
|
||||||
/// Finds the smallest integer with the given alignment.
|
/// Finds the smallest integer with the given alignment.
|
||||||
pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
|
pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
|
||||||
|
use Integer::*;
|
||||||
let dl = cx.data_layout();
|
let dl = cx.data_layout();
|
||||||
|
|
||||||
[I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
|
[I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
|
||||||
@ -863,6 +870,7 @@ impl Integer {
|
|||||||
|
|
||||||
/// Find the largest integer with the given alignment or less.
|
/// Find the largest integer with the given alignment or less.
|
||||||
pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
|
pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
|
||||||
|
use Integer::*;
|
||||||
let dl = cx.data_layout();
|
let dl = cx.data_layout();
|
||||||
|
|
||||||
// FIXME(eddyb) maybe include I128 in the future, when it works everywhere.
|
// FIXME(eddyb) maybe include I128 in the future, when it works everywhere.
|
||||||
@ -908,6 +916,7 @@ pub enum Primitive {
|
|||||||
|
|
||||||
impl Primitive {
|
impl Primitive {
|
||||||
pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
|
pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
|
||||||
|
use Primitive::*;
|
||||||
let dl = cx.data_layout();
|
let dl = cx.data_layout();
|
||||||
|
|
||||||
match self {
|
match self {
|
||||||
@ -922,6 +931,7 @@ impl Primitive {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
|
pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
|
||||||
|
use Primitive::*;
|
||||||
let dl = cx.data_layout();
|
let dl = cx.data_layout();
|
||||||
|
|
||||||
match self {
|
match self {
|
||||||
@@ -937,8 +947,7 @@ impl Primitive {
}

/// Inclusive wrap-around range of valid values, that is, if
-/// start > end, it represents `start..=MAX`,
-/// followed by `0..=end`.
+/// start > end, it represents `start..=MAX`, followed by `0..=end`.
///
/// That is, for an i8 primitive, a range of `254..=2` means following
/// sequence:
@@ -970,21 +979,21 @@ impl WrappingRange {

    /// Returns `self` with replaced `start`
    #[inline(always)]
-   pub fn with_start(mut self, start: u128) -> Self {
+   fn with_start(mut self, start: u128) -> Self {
        self.start = start;
        self
    }

    /// Returns `self` with replaced `end`
    #[inline(always)]
-   pub fn with_end(mut self, end: u128) -> Self {
+   fn with_end(mut self, end: u128) -> Self {
        self.end = end;
        self
    }

    /// Returns `true` if `size` completely fills the range.
    #[inline]
-   pub fn is_full_for(&self, size: Size) -> bool {
+   fn is_full_for(&self, size: Size) -> bool {
        let max_value = size.unsigned_int_max();
        debug_assert!(self.start <= max_value && self.end <= max_value);
        self.start == (self.end.wrapping_add(1) & max_value)
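The wrap-around semantics documented above are easiest to see as a membership test. A small sketch that mirrors the documented start/end meaning (it is not the rustc type itself):

```rust
/// `true` if `v` lies in the inclusive wrap-around range `start..=end`.
fn wrapping_contains(start: u128, end: u128, v: u128) -> bool {
    if start <= end {
        // Ordinary range, e.g. 0..=1 for `bool`.
        start <= v && v <= end
    } else {
        // Wrapped range: `start..=MAX` followed by `0..=end`.
        v >= start || v <= end
    }
}

fn main() {
    // For an i8-sized value, 254..=2 means {254, 255, 0, 1, 2}.
    assert!(wrapping_contains(254, 2, 255));
    assert!(wrapping_contains(254, 2, 0));
    assert!(!wrapping_contains(254, 2, 100));
}
```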
@ -1027,10 +1036,11 @@ pub enum Scalar {
|
|||||||
impl Scalar {
|
impl Scalar {
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn is_bool(&self) -> bool {
|
pub fn is_bool(&self) -> bool {
|
||||||
|
use Integer::*;
|
||||||
matches!(
|
matches!(
|
||||||
self,
|
self,
|
||||||
Scalar::Initialized {
|
Scalar::Initialized {
|
||||||
value: Int(I8, false),
|
value: Primitive::Int(I8, false),
|
||||||
valid_range: WrappingRange { start: 0, end: 1 }
|
valid_range: WrappingRange { start: 0, end: 1 }
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
@ -1066,7 +1076,8 @@ impl Scalar {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
/// Allows the caller to mutate the valid range. This operation will panic if attempted on a union.
|
/// Allows the caller to mutate the valid range. This operation will panic if attempted on a
|
||||||
|
/// union.
|
||||||
pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
|
pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
|
||||||
match self {
|
match self {
|
||||||
Scalar::Initialized { valid_range, .. } => valid_range,
|
Scalar::Initialized { valid_range, .. } => valid_range,
|
||||||
@ -1074,7 +1085,8 @@ impl Scalar {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns `true` if all possible numbers are valid, i.e `valid_range` covers the whole layout
|
/// Returns `true` if all possible numbers are valid, i.e `valid_range` covers the whole
|
||||||
|
/// layout.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
|
pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
|
||||||
match *self {
|
match *self {
|
||||||
@ -1093,36 +1105,11 @@ impl Scalar {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
rustc_index::newtype_index! {
|
// NOTE: This struct is generic over the FieldIdx for rust-analyzer usage.
|
||||||
/// The *source-order* index of a field in a variant.
|
|
||||||
///
|
|
||||||
/// This is how most code after type checking refers to fields, rather than
|
|
||||||
/// using names (as names have hygiene complications and more complex lookup).
|
|
||||||
///
|
|
||||||
/// Particularly for `repr(Rust)` types, this may not be the same as *layout* order.
|
|
||||||
/// (It is for `repr(C)` `struct`s, however.)
|
|
||||||
///
|
|
||||||
/// For example, in the following types,
|
|
||||||
/// ```rust
|
|
||||||
/// # enum Never {}
|
|
||||||
/// # #[repr(u16)]
|
|
||||||
/// enum Demo1 {
|
|
||||||
/// Variant0 { a: Never, b: i32 } = 100,
|
|
||||||
/// Variant1 { c: u8, d: u64 } = 10,
|
|
||||||
/// }
|
|
||||||
/// struct Demo2 { e: u8, f: u16, g: u8 }
|
|
||||||
/// ```
|
|
||||||
/// `b` is `FieldIdx(1)` in `VariantIdx(0)`,
|
|
||||||
/// `d` is `FieldIdx(1)` in `VariantIdx(1)`, and
|
|
||||||
/// `f` is `FieldIdx(1)` in `VariantIdx(0)`.
|
|
||||||
#[derive(HashStable_Generic)]
|
|
||||||
pub struct FieldIdx {}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Describes how the fields of a type are located in memory.
|
/// Describes how the fields of a type are located in memory.
|
||||||
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
||||||
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
||||||
pub enum FieldsShape {
|
pub enum FieldsShape<FieldIdx: Idx> {
|
||||||
/// Scalar primitives and `!`, which never have fields.
|
/// Scalar primitives and `!`, which never have fields.
|
||||||
Primitive,
|
Primitive,
|
||||||
|
|
||||||
@ -1162,7 +1149,7 @@ pub enum FieldsShape {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FieldsShape {
|
impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn count(&self) -> usize {
|
pub fn count(&self) -> usize {
|
||||||
match *self {
|
match *self {
|
||||||
@ -1188,7 +1175,7 @@ impl FieldsShape {
|
|||||||
assert!(i < count, "tried to access field {i} of array with {count} fields");
|
assert!(i < count, "tried to access field {i} of array with {count} fields");
|
||||||
stride * i
|
stride * i
|
||||||
}
|
}
|
||||||
FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::from_usize(i)],
|
FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1200,7 +1187,7 @@ impl FieldsShape {
|
|||||||
}
|
}
|
||||||
FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
|
FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
|
||||||
FieldsShape::Arbitrary { ref memory_index, .. } => {
|
FieldsShape::Arbitrary { ref memory_index, .. } => {
|
||||||
memory_index[FieldIdx::from_usize(i)].try_into().unwrap()
|
memory_index[FieldIdx::new(i)].try_into().unwrap()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1216,7 +1203,7 @@ impl FieldsShape {
|
|||||||
if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
|
if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
|
||||||
if use_small {
|
if use_small {
|
||||||
for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
|
for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
|
||||||
inverse_small[mem_idx as usize] = field_idx.as_u32() as u8;
|
inverse_small[mem_idx as usize] = field_idx.index() as u8;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
inverse_big = memory_index.invert_bijective_mapping();
|
inverse_big = memory_index.invert_bijective_mapping();
|
||||||
@ -1229,7 +1216,7 @@ impl FieldsShape {
|
|||||||
if use_small {
|
if use_small {
|
||||||
inverse_small[i] as usize
|
inverse_small[i] as usize
|
||||||
} else {
|
} else {
|
||||||
inverse_big[i as u32].as_usize()
|
inverse_big[i as u32].index()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@ -1252,7 +1239,6 @@ impl AddressSpace {
|
|||||||
/// in terms of categories of C types there are ABI rules for.
|
/// in terms of categories of C types there are ABI rules for.
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||||
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
||||||
|
|
||||||
pub enum Abi {
|
pub enum Abi {
|
||||||
Uninhabited,
|
Uninhabited,
|
||||||
Scalar(Scalar),
|
Scalar(Scalar),
|
||||||
@ -1373,9 +1359,10 @@ impl Abi {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// NOTE: This struct is generic over the FieldIdx and VariantIdx for rust-analyzer usage.
|
||||||
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
||||||
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
||||||
pub enum Variants {
|
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
|
||||||
/// Single enum variants, structs/tuples, unions, and all non-ADTs.
|
/// Single enum variants, structs/tuples, unions, and all non-ADTs.
|
||||||
Single { index: VariantIdx },
|
Single { index: VariantIdx },
|
||||||
|
|
||||||
@ -1387,15 +1374,16 @@ pub enum Variants {
|
|||||||
/// For enums, the tag is the sole field of the layout.
|
/// For enums, the tag is the sole field of the layout.
|
||||||
Multiple {
|
Multiple {
|
||||||
tag: Scalar,
|
tag: Scalar,
|
||||||
tag_encoding: TagEncoding,
|
tag_encoding: TagEncoding<VariantIdx>,
|
||||||
tag_field: usize,
|
tag_field: usize,
|
||||||
variants: IndexVec<VariantIdx, LayoutS>,
|
variants: IndexVec<VariantIdx, LayoutS<FieldIdx, VariantIdx>>,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// NOTE: This struct is generic over the VariantIdx for rust-analyzer usage.
|
||||||
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
||||||
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
||||||
pub enum TagEncoding {
|
pub enum TagEncoding<VariantIdx: Idx> {
|
||||||
/// The tag directly stores the discriminant, but possibly with a smaller layout
|
/// The tag directly stores the discriminant, but possibly with a smaller layout
|
||||||
/// (so converting the tag to the discriminant can require sign extension).
|
/// (so converting the tag to the discriminant can require sign extension).
|
||||||
Direct,
|
Direct,
|
||||||
@@ -1457,17 +1445,19 @@ impl Niche {
            return None;
        }

        // Extend the range of valid values being reserved by moving either `v.start` or `v.end`
        // bound. Given an eventual `Option<T>`, we try to maximize the chance for `None` to occupy
        // the niche of zero. This is accomplished by preferring enums with 2 variants(`count==1`)
        // and always taking the shortest path to niche zero. Having `None` in niche zero can
        // enable some special optimizations.
        //
        // Bound selection criteria:
        // 1. Select closest to zero given wrapping semantics.
        // 2. Avoid moving past zero if possible.
        //
        // In practice this means that enums with `count > 1` are unlikely to claim niche zero,
        // since they have to fit perfectly. If niche zero is already reserved, the selection of
        // bounds are of little interest.
        let move_start = |v: WrappingRange| {
            let start = v.start.wrapping_sub(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
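The "keep `None` in niche zero" goal above is partly visible from stable Rust through the documented `Option<NonZero*>` size guarantee; the size of the three-variant enum below is what current compilers produce, not a documented guarantee:

```rust
use std::mem::size_of;
use std::num::NonZeroU8;

#[allow(dead_code)]
enum Three { A(NonZeroU8), B, C }

fn main() {
    // `NonZeroU8` reserves the value 0, so `Option<NonZeroU8>` can encode
    // `None` as that single spare value and stay one byte wide (guaranteed).
    assert_eq!(size_of::<Option<NonZeroU8>>(), 1);
    // With two extra variants the one-value niche is no longer enough, so the
    // layout needs a real tag (typically a second byte).
    println!("Three: {} bytes", size_of::<Three>());
}
```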
@ -1501,38 +1491,21 @@ impl Niche {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
rustc_index::newtype_index! {
|
// NOTE: This struct is generic over the FieldIdx and VariantIdx for rust-analyzer usage.
|
||||||
/// The *source-order* index of a variant in a type.
|
|
||||||
///
|
|
||||||
/// For enums, these are always `0..variant_count`, regardless of any
|
|
||||||
/// custom discriminants that may have been defined, and including any
|
|
||||||
/// variants that may end up uninhabited due to field types. (Some of the
|
|
||||||
/// variants may not be present in a monomorphized ABI [`Variants`], but
|
|
||||||
/// those skipped variants are always counted when determining the *index*.)
|
|
||||||
///
|
|
||||||
/// `struct`s, `tuples`, and `unions`s are considered to have a single variant
|
|
||||||
/// with variant index zero, aka [`FIRST_VARIANT`].
|
|
||||||
#[derive(HashStable_Generic)]
|
|
||||||
pub struct VariantIdx {
|
|
||||||
/// Equivalent to `VariantIdx(0)`.
|
|
||||||
const FIRST_VARIANT = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(PartialEq, Eq, Hash, Clone)]
|
#[derive(PartialEq, Eq, Hash, Clone)]
|
||||||
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
|
||||||
pub struct LayoutS {
|
pub struct LayoutS<FieldIdx: Idx, VariantIdx: Idx> {
|
||||||
/// Says where the fields are located within the layout.
|
/// Says where the fields are located within the layout.
|
||||||
pub fields: FieldsShape,
|
pub fields: FieldsShape<FieldIdx>,
|
||||||
|
|
||||||
/// Encodes information about multi-variant layouts.
|
/// Encodes information about multi-variant layouts.
|
||||||
/// Even with `Multiple` variants, a layout still has its own fields! Those are then
|
/// Even with `Multiple` variants, a layout still has its own fields! Those are then
|
||||||
/// shared between all variants. One of them will be the discriminant,
|
/// shared between all variants. One of them will be the discriminant,
|
||||||
/// but e.g. generators can have more.
|
/// but e.g. coroutines can have more.
|
||||||
///
|
///
|
||||||
/// To access all fields of this layout, both `fields` and the fields of the active variant
|
/// To access all fields of this layout, both `fields` and the fields of the active variant
|
||||||
/// must be taken into account.
|
/// must be taken into account.
|
||||||
pub variants: Variants,
|
pub variants: Variants<FieldIdx, VariantIdx>,
|
||||||
|
|
||||||
/// The `abi` defines how this data is passed between functions, and it defines
|
/// The `abi` defines how this data is passed between functions, and it defines
|
||||||
/// value restrictions via `valid_range`.
|
/// value restrictions via `valid_range`.
|
||||||
@ -1561,13 +1534,13 @@ pub struct LayoutS {
|
|||||||
pub unadjusted_abi_align: Align,
|
pub unadjusted_abi_align: Align,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl LayoutS {
|
impl<FieldIdx: Idx, VariantIdx: Idx> LayoutS<FieldIdx, VariantIdx> {
|
||||||
pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
|
pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
|
||||||
let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar);
|
let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar);
|
||||||
let size = scalar.size(cx);
|
let size = scalar.size(cx);
|
||||||
let align = scalar.align(cx);
|
let align = scalar.align(cx);
|
||||||
LayoutS {
|
LayoutS {
|
||||||
variants: Variants::Single { index: FIRST_VARIANT },
|
variants: Variants::Single { index: VariantIdx::new(0) },
|
||||||
fields: FieldsShape::Primitive,
|
fields: FieldsShape::Primitive,
|
||||||
abi: Abi::Scalar(scalar),
|
abi: Abi::Scalar(scalar),
|
||||||
largest_niche,
|
largest_niche,
|
||||||
@ -1579,7 +1552,11 @@ impl LayoutS {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Debug for LayoutS {
|
impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutS<FieldIdx, VariantIdx>
|
||||||
|
where
|
||||||
|
FieldsShape<FieldIdx>: fmt::Debug,
|
||||||
|
Variants<FieldIdx, VariantIdx>: fmt::Debug,
|
||||||
|
{
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
// This is how `Layout` used to print before it become
|
// This is how `Layout` used to print before it become
|
||||||
// `Interned<LayoutS>`. We print it like this to avoid having to update
|
// `Interned<LayoutS>`. We print it like this to avoid having to update
|
||||||
@ -1607,61 +1584,6 @@ impl fmt::Debug for LayoutS {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable_Generic)]
|
|
||||||
#[rustc_pass_by_value]
|
|
||||||
pub struct Layout<'a>(pub Interned<'a, LayoutS>);
|
|
||||||
|
|
||||||
impl<'a> fmt::Debug for Layout<'a> {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
// See comment on `<LayoutS as Debug>::fmt` above.
|
|
||||||
self.0.0.fmt(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Layout<'a> {
|
|
||||||
pub fn fields(self) -> &'a FieldsShape {
|
|
||||||
&self.0.0.fields
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn variants(self) -> &'a Variants {
|
|
||||||
&self.0.0.variants
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn abi(self) -> Abi {
|
|
||||||
self.0.0.abi
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn largest_niche(self) -> Option<Niche> {
|
|
||||||
self.0.0.largest_niche
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn align(self) -> AbiAndPrefAlign {
|
|
||||||
self.0.0.align
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn size(self) -> Size {
|
|
||||||
self.0.0.size
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn max_repr_align(self) -> Option<Align> {
|
|
||||||
self.0.0.max_repr_align
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn unadjusted_abi_align(self) -> Align {
|
|
||||||
self.0.0.unadjusted_abi_align
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether the layout is from a type that implements [`std::marker::PointerLike`].
|
|
||||||
///
|
|
||||||
/// Currently, that means that the type is pointer-sized, pointer-aligned,
|
|
||||||
/// and has a scalar ABI.
|
|
||||||
pub fn is_pointer_like(self, data_layout: &TargetDataLayout) -> bool {
|
|
||||||
self.size() == data_layout.pointer_size
|
|
||||||
&& self.align().abi == data_layout.pointer_align.abi
|
|
||||||
&& matches!(self.abi(), Abi::Scalar(..))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||||
pub enum PointerKind {
|
pub enum PointerKind {
|
||||||
/// Shared reference. `frozen` indicates the absence of any `UnsafeCell`.
|
/// Shared reference. `frozen` indicates the absence of any `UnsafeCell`.
|
||||||
@ -1681,7 +1603,7 @@ pub struct PointeeInfo {
|
|||||||
pub safe: Option<PointerKind>,
|
pub safe: Option<PointerKind>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl LayoutS {
|
impl<FieldIdx: Idx, VariantIdx: Idx> LayoutS<FieldIdx, VariantIdx> {
|
||||||
/// Returns `true` if the layout corresponds to an unsized type.
|
/// Returns `true` if the layout corresponds to an unsized type.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn is_unsized(&self) -> bool {
|
pub fn is_unsized(&self) -> bool {
|
||||||
|
|||||||
@ -4,4 +4,6 @@ version = "0.0.0"
|
|||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
# tidy-alphabetical-start
|
||||||
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
|
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
|
||||||
|
# tidy-alphabetical-end
|
||||||
|
|||||||
@ -11,13 +11,13 @@
|
|||||||
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
|
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
|
||||||
test(no_crate_inject, attr(deny(warnings)))
|
test(no_crate_inject, attr(deny(warnings)))
|
||||||
)]
|
)]
|
||||||
|
#![cfg_attr(not(bootstrap), doc(rust_logo))]
|
||||||
|
#![cfg_attr(not(bootstrap), feature(rustdoc_internals))]
|
||||||
#![feature(core_intrinsics)]
|
#![feature(core_intrinsics)]
|
||||||
#![feature(dropck_eyepatch)]
|
#![feature(dropck_eyepatch)]
|
||||||
#![feature(new_uninit)]
|
#![feature(new_uninit)]
|
||||||
#![feature(maybe_uninit_slice)]
|
#![feature(maybe_uninit_slice)]
|
||||||
#![feature(min_specialization)]
|
|
||||||
#![feature(decl_macro)]
|
#![feature(decl_macro)]
|
||||||
#![feature(pointer_byte_offsets)]
|
|
||||||
#![feature(rustc_attrs)]
|
#![feature(rustc_attrs)]
|
||||||
#![cfg_attr(test, feature(test))]
|
#![cfg_attr(test, feature(test))]
|
||||||
#![feature(strict_provenance)]
|
#![feature(strict_provenance)]
|
||||||
@ -44,23 +44,6 @@ fn outline<F: FnOnce() -> R, R>(f: F) -> R {
|
|||||||
f()
|
f()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// An arena that can hold objects of only one type.
|
|
||||||
pub struct TypedArena<T> {
|
|
||||||
/// A pointer to the next object to be allocated.
|
|
||||||
ptr: Cell<*mut T>,
|
|
||||||
|
|
||||||
/// A pointer to the end of the allocated area. When this pointer is
|
|
||||||
/// reached, a new chunk is allocated.
|
|
||||||
end: Cell<*mut T>,
|
|
||||||
|
|
||||||
/// A vector of arena chunks.
|
|
||||||
chunks: RefCell<Vec<ArenaChunk<T>>>,
|
|
||||||
|
|
||||||
/// Marker indicating that dropping the arena causes its owned
|
|
||||||
/// instances of `T` to be dropped.
|
|
||||||
_own: PhantomData<T>,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct ArenaChunk<T = u8> {
|
struct ArenaChunk<T = u8> {
|
||||||
/// The raw storage for the arena chunk.
|
/// The raw storage for the arena chunk.
|
||||||
storage: NonNull<[MaybeUninit<T>]>,
|
storage: NonNull<[MaybeUninit<T>]>,
|
||||||
@ -130,6 +113,23 @@ impl<T> ArenaChunk<T> {
|
|||||||
const PAGE: usize = 4096;
|
const PAGE: usize = 4096;
|
||||||
const HUGE_PAGE: usize = 2 * 1024 * 1024;
|
const HUGE_PAGE: usize = 2 * 1024 * 1024;
|
||||||
|
|
||||||
|
/// An arena that can hold objects of only one type.
|
||||||
|
pub struct TypedArena<T> {
|
||||||
|
/// A pointer to the next object to be allocated.
|
||||||
|
ptr: Cell<*mut T>,
|
||||||
|
|
||||||
|
/// A pointer to the end of the allocated area. When this pointer is
|
||||||
|
/// reached, a new chunk is allocated.
|
||||||
|
end: Cell<*mut T>,
|
||||||
|
|
||||||
|
/// A vector of arena chunks.
|
||||||
|
chunks: RefCell<Vec<ArenaChunk<T>>>,
|
||||||
|
|
||||||
|
/// Marker indicating that dropping the arena causes its owned
|
||||||
|
/// instances of `T` to be dropped.
|
||||||
|
_own: PhantomData<T>,
|
||||||
|
}
|
||||||
|
|
||||||
impl<T> Default for TypedArena<T> {
|
impl<T> Default for TypedArena<T> {
|
||||||
/// Creates a new `TypedArena`.
|
/// Creates a new `TypedArena`.
|
||||||
fn default() -> TypedArena<T> {
|
fn default() -> TypedArena<T> {
|
||||||
@ -144,77 +144,6 @@ impl<T> Default for TypedArena<T> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
trait IterExt<T> {
|
|
||||||
fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T];
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<I, T> IterExt<T> for I
|
|
||||||
where
|
|
||||||
I: IntoIterator<Item = T>,
|
|
||||||
{
|
|
||||||
// This default collects into a `SmallVec` and then allocates by copying
|
|
||||||
// from it. The specializations below for types like `Vec` are more
|
|
||||||
// efficient, copying directly without the intermediate collecting step.
|
|
||||||
// This default could be made more efficient, like
|
|
||||||
// `DroplessArena::alloc_from_iter`, but it's not hot enough to bother.
|
|
||||||
#[inline]
|
|
||||||
default fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T] {
|
|
||||||
let vec: SmallVec<[_; 8]> = self.into_iter().collect();
|
|
||||||
vec.alloc_from_iter(arena)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T, const N: usize> IterExt<T> for std::array::IntoIter<T, N> {
|
|
||||||
#[inline]
|
|
||||||
fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T] {
|
|
||||||
let len = self.len();
|
|
||||||
if len == 0 {
|
|
||||||
return &mut [];
|
|
||||||
}
|
|
||||||
// Move the content to the arena by copying and then forgetting it.
|
|
||||||
unsafe {
|
|
||||||
let start_ptr = arena.alloc_raw_slice(len);
|
|
||||||
self.as_slice().as_ptr().copy_to_nonoverlapping(start_ptr, len);
|
|
||||||
mem::forget(self);
|
|
||||||
slice::from_raw_parts_mut(start_ptr, len)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> IterExt<T> for Vec<T> {
|
|
||||||
#[inline]
|
|
||||||
fn alloc_from_iter(mut self, arena: &TypedArena<T>) -> &mut [T] {
|
|
||||||
let len = self.len();
|
|
||||||
if len == 0 {
|
|
||||||
return &mut [];
|
|
||||||
}
|
|
||||||
// Move the content to the arena by copying and then forgetting it.
|
|
||||||
unsafe {
|
|
||||||
let start_ptr = arena.alloc_raw_slice(len);
|
|
||||||
self.as_ptr().copy_to_nonoverlapping(start_ptr, len);
|
|
||||||
self.set_len(0);
|
|
||||||
slice::from_raw_parts_mut(start_ptr, len)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<A: smallvec::Array> IterExt<A::Item> for SmallVec<A> {
|
|
||||||
#[inline]
|
|
||||||
fn alloc_from_iter(mut self, arena: &TypedArena<A::Item>) -> &mut [A::Item] {
|
|
||||||
let len = self.len();
|
|
||||||
if len == 0 {
|
|
||||||
return &mut [];
|
|
||||||
}
|
|
||||||
// Move the content to the arena by copying and then forgetting it.
|
|
||||||
unsafe {
|
|
||||||
let start_ptr = arena.alloc_raw_slice(len);
|
|
||||||
self.as_ptr().copy_to_nonoverlapping(start_ptr, len);
|
|
||||||
self.set_len(0);
|
|
||||||
slice::from_raw_parts_mut(start_ptr, len)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> TypedArena<T> {
|
impl<T> TypedArena<T> {
|
||||||
/// Allocates an object in the `TypedArena`, returning a reference to it.
|
/// Allocates an object in the `TypedArena`, returning a reference to it.
|
||||||
#[inline]
|
#[inline]
|
||||||
@@ -250,33 +179,55 @@ impl<T> TypedArena<T> {
        available_bytes >= additional_bytes
    }

-   /// Ensures there's enough space in the current chunk to fit `len` objects.
    #[inline]
-   fn ensure_capacity(&self, additional: usize) {
-       if !self.can_allocate(additional) {
-           self.grow(additional);
-           debug_assert!(self.can_allocate(additional));
-       }
-   }
-
-   #[inline]
-   unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
+   fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

-       self.ensure_capacity(len);
+       // Ensure the current chunk can fit `len` objects.
+       if !self.can_allocate(len) {
+           self.grow(len);
+           debug_assert!(self.can_allocate(len));
+       }

        let start_ptr = self.ptr.get();
-       // SAFETY: `self.ensure_capacity` makes sure that there is enough space
-       // for `len` elements.
+       // SAFETY: `can_allocate`/`grow` ensures that there is enough space for
+       // `len` elements.
        unsafe { self.ptr.set(start_ptr.add(len)) };
        start_ptr
    }

    #[inline]
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
+       // This implementation is entirely separate to
+       // `DroplessIterator::alloc_from_iter`, even though conceptually they
+       // are the same.
+       //
+       // `DroplessIterator` (in the fast case) writes elements from the
+       // iterator one at a time into the allocated memory. That's easy
+       // because the elements don't implement `Drop`. But for `TypedArena`
+       // they do implement `Drop`, which means that if the iterator panics we
+       // could end up with some allocated-but-uninitialized elements, which
+       // will then cause UB in `TypedArena::drop`.
+       //
+       // Instead we use an approach where any iterator panic will occur
+       // before the memory is allocated. This function is much less hot than
+       // `DroplessArena::alloc_from_iter`, so it doesn't need to be
+       // hyper-optimized.
        assert!(mem::size_of::<T>() != 0);
-       iter.alloc_from_iter(self)
+
+       let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
+       if vec.is_empty() {
+           return &mut [];
+       }
+       // Move the content to the arena by copying and then forgetting it.
+       let len = vec.len();
+       let start_ptr = self.alloc_raw_slice(len);
+       unsafe {
+           vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+           vec.set_len(0);
+           slice::from_raw_parts_mut(start_ptr, len)
+       }
    }
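The new comment explains why `TypedArena` drains the iterator into a `SmallVec` before touching the arena: if the iterator panics, nothing half-initialized is ever handed to the arena. A toy model of that ordering, using a plain counter in place of the real arena and `catch_unwind` to simulate the panicking iterator (illustrative only):

```rust
use std::cell::Cell;
use std::panic;

/// Stand-in for an arena: we only care about how many slots were handed out.
struct ToyArena {
    allocated: Cell<usize>,
}

impl ToyArena {
    fn alloc_from_iter<I: IntoIterator<Item = String>>(&self, iter: I) -> Vec<String> {
        // Drain the iterator *first*; a panic here leaves the arena untouched.
        let staged: Vec<String> = iter.into_iter().collect();
        self.allocated.set(self.allocated.get() + staged.len());
        staged
    }
}

fn main() {
    let arena = ToyArena { allocated: Cell::new(0) };
    let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
        arena.alloc_from_iter((0..4).map(|i| {
            if i == 2 {
                panic!("iterator blew up");
            }
            i.to_string()
        }))
    }));
    assert!(result.is_err());
    // Because collection happened before allocation, no arena slots were
    // reserved for the half-produced elements.
    assert_eq!(arena.allocated.get(), 0);
}
```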
|
|
||||||
/// Grows the arena.
|
/// Grows the arena.
|
||||||
@ -407,6 +358,8 @@ impl Default for DroplessArena {
|
|||||||
#[inline]
|
#[inline]
|
||||||
fn default() -> DroplessArena {
|
fn default() -> DroplessArena {
|
||||||
DroplessArena {
|
DroplessArena {
|
||||||
|
// We set both `start` and `end` to 0 so that the first call to
|
||||||
|
// alloc() will trigger a grow().
|
||||||
start: Cell::new(ptr::null_mut()),
|
start: Cell::new(ptr::null_mut()),
|
||||||
end: Cell::new(ptr::null_mut()),
|
end: Cell::new(ptr::null_mut()),
|
||||||
chunks: Default::default(),
|
chunks: Default::default(),
|
||||||
@@ -415,9 +368,11 @@ impl Default for DroplessArena {
}

impl DroplessArena {
+   #[inline(never)]
+   #[cold]
    fn grow(&self, layout: Layout) {
        // Add some padding so we can align `self.end` while
-       // stilling fitting in a `layout` allocation.
+       // still fitting in a `layout` allocation.
        let additional = layout.size() + cmp::max(DROPLESS_ALIGNMENT, layout.align()) - 1;

        unsafe {
@@ -441,7 +396,7 @@ impl DroplessArena {
            let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE));
            self.start.set(chunk.start());

-           // Align the end to DROPLESS_ALIGNMENT
+           // Align the end to DROPLESS_ALIGNMENT.
            let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);

            // Make sure we don't go past `start`. This should not happen since the allocation
@@ -454,55 +409,40 @@ impl DroplessArena {
         }
     }

-    #[inline(never)]
-    #[cold]
-    fn grow_and_alloc_raw(&self, layout: Layout) -> *mut u8 {
-        self.grow(layout);
-        self.alloc_raw_without_grow(layout).unwrap()
-    }
-
-    #[inline(never)]
-    #[cold]
-    fn grow_and_alloc<T>(&self) -> *mut u8 {
-        self.grow_and_alloc_raw(Layout::new::<T>())
-    }
-
-    /// Allocates a byte slice with specified layout from the current memory
-    /// chunk. Returns `None` if there is no free space left to satisfy the
-    /// request.
-    #[inline]
-    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
-        let start = self.start.get().addr();
-        let old_end = self.end.get();
-        let end = old_end.addr();
-
-        // Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT
-        let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);
-
-        // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT
-        unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
-
-        let new_end = align_down(end.checked_sub(bytes)?, layout.align());
-        if start <= new_end {
-            let new_end = old_end.with_addr(new_end);
-            // `new_end` is aligned to DROPLESS_ALIGNMENT as `align_down` preserves alignment
-            // as both `end` and `bytes` are already aligned to DROPLESS_ALIGNMENT.
-            self.end.set(new_end);
-            Some(new_end)
-        } else {
-            None
-        }
-    }
-
     #[inline]
     pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
         assert!(layout.size() != 0);
-        if let Some(a) = self.alloc_raw_without_grow(layout) {
-            return a;
-        }
-        // No free space left. Allocate a new chunk to satisfy the request.
-        // On failure the grow will panic or abort.
-        self.grow_and_alloc_raw(layout)
+        // This loop executes once or twice: if allocation fails the first
+        // time, the `grow` ensures it will succeed the second time.
+        loop {
+            let start = self.start.get().addr();
+            let old_end = self.end.get();
+            let end = old_end.addr();
+
+            // Align allocated bytes so that `self.end` stays aligned to
+            // DROPLESS_ALIGNMENT.
+            let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);
+
+            // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT.
+            unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
+
+            if let Some(sub) = end.checked_sub(bytes) {
+                let new_end = align_down(sub, layout.align());
+                if start <= new_end {
+                    let new_end = old_end.with_addr(new_end);
+                    // `new_end` is aligned to DROPLESS_ALIGNMENT as `align_down`
+                    // preserves alignment as both `end` and `bytes` are already
+                    // aligned to DROPLESS_ALIGNMENT.
+                    self.end.set(new_end);
+                    return new_end;
+                }
+            }
+
+            // No free space left. Allocate a new chunk to satisfy the request.
+            // On failure the grow will panic or abort.
+            self.grow(layout);
+        }
     }

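The rewritten `alloc_raw` bumps `self.end` downwards and relies on `align_up`/`align_down` to keep the end pointer aligned to `DROPLESS_ALIGNMENT`. The same arithmetic can be checked in isolation on plain `usize` addresses; the alignment value and helper names below are assumptions for illustration, not the arena's real internals:

```rust
const DROPLESS_ALIGNMENT: usize = 8; // assumed value for the sketch

fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    val & !(align - 1)
}

fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (val + align - 1) & !(align - 1)
}

/// Returns the new `end` address after carving `size` bytes off the top of a
/// chunk spanning `start..end`, or `None` if the chunk is exhausted.
fn bump_down(start: usize, end: usize, size: usize, align: usize) -> Option<usize> {
    // Round the request up so `end` stays aligned to DROPLESS_ALIGNMENT.
    let bytes = align_up(size, DROPLESS_ALIGNMENT);
    let new_end = align_down(end.checked_sub(bytes)?, align);
    (start <= new_end).then_some(new_end)
}

fn main() {
    // A chunk at 0x1000..0x2000: a 10-byte, 4-aligned request rounds up to 16 bytes.
    assert_eq!(bump_down(0x1000, 0x2000, 10, 4), Some(0x2000 - 16));
    // A request larger than the chunk fails, which is when the arena would grow.
    assert_eq!(bump_down(0x1000, 0x2000, 0x2000, 8), None);
}
```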
     #[inline]
@@ -510,13 +450,7 @@ impl DroplessArena {
         assert!(!mem::needs_drop::<T>());
         assert!(mem::size_of::<T>() != 0);

-        let mem = if let Some(a) = self.alloc_raw_without_grow(Layout::for_value::<T>(&object)) {
-            a
-        } else {
-            // No free space left. Allocate a new chunk to satisfy the request.
-            // On failure the grow will panic or abort.
-            self.grow_and_alloc::<T>()
-        } as *mut T;
+        let mem = self.alloc_raw(Layout::new::<T>()) as *mut T;

         unsafe {
             // Write into uninitialized memory.
@@ -713,10 +647,10 @@ pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
     }

     #[allow(clippy::mut_from_ref)]
-    pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx, C>, C>(
-        &'a self,
+    pub fn alloc_from_iter<T: ArenaAllocatable<'tcx, C>, C>(
+        &self,
         iter: impl ::std::iter::IntoIterator<Item = T>,
-    ) -> &'a mut [T] {
+    ) -> &mut [T] {
         T::allocate_from_iter(self, iter)
     }
 }
@@ -3,9 +3,8 @@ name = "rustc_ast"
 version = "0.0.0"
 edition = "2021"

-[lib]
-
 [dependencies]
+# tidy-alphabetical-start
 bitflags = "1.2.1"
 memchr = "2.5.0"
 rustc_data_structures = { path = "../rustc_data_structures" }
@@ -14,6 +13,9 @@ rustc_lexer = { path = "../rustc_lexer" }
 rustc_macros = { path = "../rustc_macros" }
 rustc_serialize = { path = "../rustc_serialize" }
 rustc_span = { path = "../rustc_span" }
+# For Mutability and Movability, which could be uplifted into a common crate.
+rustc_type_ir = { path = "../rustc_type_ir" }
 smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
 thin-vec = "0.2.12"
 tracing = "0.1"
+# tidy-alphabetical-end
@@ -34,6 +34,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 use rustc_span::source_map::{respan, Spanned};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP};
+pub use rustc_type_ir::{Movability, Mutability};
 use std::fmt;
 use std::mem;
 use thin_vec::{thin_vec, ThinVec};
@@ -733,6 +734,8 @@ pub enum RangeSyntax {
 }

 /// All the different flavors of pattern that Rust recognizes.
+//
+// Adding a new variant? Please update `test_pat` in `tests/ui/macros/stringify.rs`.
 #[derive(Clone, Encodable, Decodable, Debug)]
 pub enum PatKind {
     /// Represents a wildcard pattern (`_`).
@@ -800,57 +803,6 @@ pub enum PatKind {
     MacCall(P<MacCall>),
 }

-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Copy)]
-#[derive(HashStable_Generic, Encodable, Decodable)]
-pub enum Mutability {
-    // N.B. Order is deliberate, so that Not < Mut
-    Not,
-    Mut,
-}
-
-impl Mutability {
-    pub fn invert(self) -> Self {
-        match self {
-            Mutability::Mut => Mutability::Not,
-            Mutability::Not => Mutability::Mut,
-        }
-    }
-
-    /// Returns `""` (empty string) or `"mut "` depending on the mutability.
-    pub fn prefix_str(self) -> &'static str {
-        match self {
-            Mutability::Mut => "mut ",
-            Mutability::Not => "",
-        }
-    }
-
-    /// Returns `"&"` or `"&mut "` depending on the mutability.
-    pub fn ref_prefix_str(self) -> &'static str {
-        match self {
-            Mutability::Not => "&",
-            Mutability::Mut => "&mut ",
-        }
-    }
-
-    /// Returns `""` (empty string) or `"mutably "` depending on the mutability.
-    pub fn mutably_str(self) -> &'static str {
-        match self {
-            Mutability::Not => "",
-            Mutability::Mut => "mutably ",
-        }
-    }
-
-    /// Return `true` if self is mutable
-    pub fn is_mut(self) -> bool {
-        matches!(self, Self::Mut)
-    }
-
-    /// Return `true` if self is **not** mutable
-    pub fn is_not(self) -> bool {
-        matches!(self, Self::Not)
-    }
-}
-
 /// The kind of borrow in an `AddrOf` expression,
 /// e.g., `&place` or `&raw const place`.
 #[derive(Clone, Copy, PartialEq, Eq, Debug)]
@ -1017,6 +969,7 @@ impl Stmt {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Adding a new variant? Please update `test_stmt` in `tests/ui/macros/stringify.rs`.
|
||||||
#[derive(Clone, Encodable, Decodable, Debug)]
|
#[derive(Clone, Encodable, Decodable, Debug)]
|
||||||
pub enum StmtKind {
|
pub enum StmtKind {
|
||||||
/// A local (let) binding.
|
/// A local (let) binding.
|
||||||
@ -1282,7 +1235,7 @@ impl Expr {
|
|||||||
ExprKind::Closure(..) => ExprPrecedence::Closure,
|
ExprKind::Closure(..) => ExprPrecedence::Closure,
|
||||||
ExprKind::Block(..) => ExprPrecedence::Block,
|
ExprKind::Block(..) => ExprPrecedence::Block,
|
||||||
ExprKind::TryBlock(..) => ExprPrecedence::TryBlock,
|
ExprKind::TryBlock(..) => ExprPrecedence::TryBlock,
|
||||||
ExprKind::Async(..) => ExprPrecedence::Async,
|
ExprKind::Gen(..) => ExprPrecedence::Gen,
|
||||||
ExprKind::Await(..) => ExprPrecedence::Await,
|
ExprKind::Await(..) => ExprPrecedence::Await,
|
||||||
ExprKind::Assign(..) => ExprPrecedence::Assign,
|
ExprKind::Assign(..) => ExprPrecedence::Assign,
|
||||||
ExprKind::AssignOp(..) => ExprPrecedence::AssignOp,
|
ExprKind::AssignOp(..) => ExprPrecedence::AssignOp,
|
||||||
@ -1395,6 +1348,7 @@ pub struct StructExpr {
|
|||||||
pub rest: StructRest,
|
pub rest: StructRest,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Adding a new variant? Please update `test_expr` in `tests/ui/macros/stringify.rs`.
|
||||||
#[derive(Clone, Encodable, Decodable, Debug)]
|
#[derive(Clone, Encodable, Decodable, Debug)]
|
||||||
pub enum ExprKind {
|
pub enum ExprKind {
|
||||||
/// An array (`[a, b, c, d]`)
|
/// An array (`[a, b, c, d]`)
|
||||||
@ -1451,11 +1405,9 @@ pub enum ExprKind {
|
|||||||
Closure(Box<Closure>),
|
Closure(Box<Closure>),
|
||||||
/// A block (`'label: { ... }`).
|
/// A block (`'label: { ... }`).
|
||||||
Block(P<Block>, Option<Label>),
|
Block(P<Block>, Option<Label>),
|
||||||
/// An async block (`async move { ... }`).
|
/// An `async` block (`async move { ... }`),
|
||||||
///
|
/// or a `gen` block (`gen move { ... }`)
|
||||||
/// The async block used to have a `NodeId`, which was removed in favor of
|
Gen(CaptureBy, P<Block>, GenBlockKind),
|
||||||
/// using the parent `NodeId` of the parent `Expr`.
|
|
||||||
Async(CaptureBy, P<Block>),
|
|
||||||
/// An await expression (`my_future.await`). Span is of await keyword.
|
/// An await expression (`my_future.await`). Span is of await keyword.
|
||||||
Await(P<Expr>, Span),
|
Await(P<Expr>, Span),
|
||||||
|
|
||||||
@@ -1545,6 +1497,28 @@ pub enum ExprKind {
     Err,
 }

+/// Used to differentiate between `async {}` blocks and `gen {}` blocks.
+#[derive(Clone, Encodable, Decodable, Debug, PartialEq, Eq)]
+pub enum GenBlockKind {
+    Async,
+    Gen,
+}
+
+impl fmt::Display for GenBlockKind {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.modifier().fmt(f)
+    }
+}
+
+impl GenBlockKind {
+    pub fn modifier(&self) -> &'static str {
+        match self {
+            GenBlockKind::Async => "async",
+            GenBlockKind::Gen => "gen",
+        }
+    }
+}
+
 /// The explicit `Self` type in a "qualified path". The actual
 /// path, including the trait and the associated item, is stored
 /// separately. `position` represents the index of the associated
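The new `GenBlockKind` lets later lowering and diagnostics name the block's keyword directly. A standalone copy of the pattern (minus the rustc-specific derives), showing the `Display` impl deferring to `modifier()`; this is an illustrative restatement, not the compiler's own module:

```rust
use std::fmt;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum GenBlockKind {
    Async,
    Gen,
}

impl GenBlockKind {
    fn modifier(self) -> &'static str {
        match self {
            GenBlockKind::Async => "async",
            GenBlockKind::Gen => "gen",
        }
    }
}

impl fmt::Display for GenBlockKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Display is just the keyword, so messages can interpolate the kind.
        f.write_str(self.modifier())
    }
}

fn main() {
    assert_eq!(
        format!("`{}` blocks are experimental", GenBlockKind::Gen),
        "`gen` blocks are experimental"
    );
}
```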
@@ -1574,22 +1548,14 @@ pub struct QSelf {
 #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum CaptureBy {
     /// `move |x| y + x`.
-    Value,
+    Value {
+        /// The span of the `move` keyword.
+        move_kw: Span,
+    },
     /// `move` keyword was not specified.
     Ref,
 }

-/// The movability of a generator / closure literal:
-/// whether a generator contains self-references, causing it to be `!Unpin`.
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable, Debug, Copy)]
-#[derive(HashStable_Generic)]
-pub enum Movability {
-    /// May contain self-references, `!Unpin`.
-    Static,
-    /// Must not contain self-references, `Unpin`.
-    Movable,
-}
-
 /// Closure lifetime binder, `for<'a, 'b>` in `for<'a, 'b> |_: &'a (), _: &'b ()|`.
 #[derive(Clone, Encodable, Decodable, Debug)]
 pub enum ClosureBinder {
@ -2076,6 +2042,8 @@ pub struct BareFnTy {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// The various kinds of type recognized by the compiler.
|
/// The various kinds of type recognized by the compiler.
|
||||||
|
//
|
||||||
|
// Adding a new variant? Please update `test_ty` in `tests/ui/macros/stringify.rs`.
|
||||||
#[derive(Clone, Encodable, Decodable, Debug)]
|
#[derive(Clone, Encodable, Decodable, Debug)]
|
||||||
pub enum TyKind {
|
pub enum TyKind {
|
||||||
/// A variable-length slice (`[T]`).
|
/// A variable-length slice (`[T]`).
|
||||||
@ -2418,6 +2386,12 @@ pub enum Async {
|
|||||||
No,
|
No,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Encodable, Decodable, Debug)]
|
||||||
|
pub enum Gen {
|
||||||
|
Yes { span: Span, closure_id: NodeId, return_impl_trait_id: NodeId },
|
||||||
|
No,
|
||||||
|
}
|
||||||
|
|
||||||
impl Async {
|
impl Async {
|
||||||
pub fn is_async(self) -> bool {
|
pub fn is_async(self) -> bool {
|
||||||
matches!(self, Async::Yes { .. })
|
matches!(self, Async::Yes { .. })
|
||||||
@ -2941,6 +2915,7 @@ pub struct ConstItem {
|
|||||||
pub expr: Option<P<Expr>>,
|
pub expr: Option<P<Expr>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Adding a new variant? Please update `test_item` in `tests/ui/macros/stringify.rs`.
|
||||||
#[derive(Clone, Encodable, Decodable, Debug)]
|
#[derive(Clone, Encodable, Decodable, Debug)]
|
||||||
pub enum ItemKind {
|
pub enum ItemKind {
|
||||||
/// An `extern crate` item, with the optional *original* crate name if the crate was renamed.
|
/// An `extern crate` item, with the optional *original* crate name if the crate was renamed.
|
||||||
|
|||||||
@ -197,10 +197,10 @@ impl Attribute {
|
|||||||
.unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
|
.unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
|
||||||
.to_attr_token_stream()
|
.to_attr_token_stream()
|
||||||
.to_tokenstream(),
|
.to_tokenstream(),
|
||||||
&AttrKind::DocComment(comment_kind, data) => TokenStream::new(vec![TokenTree::Token(
|
&AttrKind::DocComment(comment_kind, data) => TokenStream::token_alone(
|
||||||
Token::new(token::DocComment(comment_kind, self.style, data), self.span),
|
token::DocComment(comment_kind, self.style, data),
|
||||||
Spacing::Alone,
|
self.span,
|
||||||
)]),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -520,9 +520,7 @@ impl NestedMetaItem {
|
|||||||
I: Iterator<Item = &'a TokenTree>,
|
I: Iterator<Item = &'a TokenTree>,
|
||||||
{
|
{
|
||||||
match tokens.peek() {
|
match tokens.peek() {
|
||||||
Some(TokenTree::Token(token, _))
|
Some(TokenTree::Token(token, _)) if let Some(lit) = MetaItemLit::from_token(token) => {
|
||||||
if let Some(lit) = MetaItemLit::from_token(token) =>
|
|
||||||
{
|
|
||||||
tokens.next();
|
tokens.next();
|
||||||
return Some(NestedMetaItem::Lit(lit));
|
return Some(NestedMetaItem::Lit(lit));
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,7 @@
+use crate::{attr, Attribute};
+use rustc_span::symbol::sym;
+use rustc_span::Symbol;
+
 #[derive(Debug)]
 pub enum EntryPointType {
     None,
@@ -6,3 +10,28 @@ pub enum EntryPointType {
     Start,
     OtherMain, // Not an entry point, but some other function named main
 }
+
+pub fn entry_point_type(
+    attrs: &[Attribute],
+    at_root: bool,
+    name: Option<Symbol>,
+) -> EntryPointType {
+    if attr::contains_name(attrs, sym::start) {
+        EntryPointType::Start
+    } else if attr::contains_name(attrs, sym::rustc_main) {
+        EntryPointType::RustcMainAttr
+    } else {
+        if let Some(name) = name
+            && name == sym::main
+        {
+            if at_root {
+                // This is a top-level function so it can be `main`.
+                EntryPointType::MainNamed
+            } else {
+                EntryPointType::OtherMain
+            }
+        } else {
+            EntryPointType::None
+        }
+    }
+}
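The new `entry_point_type` helper centralizes how a function is classified as a program entry point. A simplified, self-contained version of the same decision tree, with plain strings standing in for rustc's `Attribute` and `Symbol` types (a deliberate simplification for illustration, not the compiler API):

```rust
#[derive(Debug, PartialEq)]
enum EntryPointType {
    None,
    MainNamed,
    RustcMainAttr,
    Start,
    OtherMain,
}

fn entry_point_type(attrs: &[&str], at_root: bool, name: Option<&str>) -> EntryPointType {
    if attrs.contains(&"start") {
        EntryPointType::Start
    } else if attrs.contains(&"rustc_main") {
        EntryPointType::RustcMainAttr
    } else if name == Some("main") {
        // Only a crate-root `fn main` is the real entry point.
        if at_root {
            EntryPointType::MainNamed
        } else {
            EntryPointType::OtherMain
        }
    } else {
        EntryPointType::None
    }
}

fn main() {
    assert_eq!(entry_point_type(&[], true, Some("main")), EntryPointType::MainNamed);
    assert_eq!(entry_point_type(&[], false, Some("main")), EntryPointType::OtherMain);
    assert_eq!(entry_point_type(&["start"], false, None), EntryPointType::Start);
}
```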
@ -8,6 +8,9 @@
|
|||||||
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
|
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
|
||||||
test(attr(deny(warnings)))
|
test(attr(deny(warnings)))
|
||||||
)]
|
)]
|
||||||
|
#![cfg_attr(not(bootstrap), doc(rust_logo))]
|
||||||
|
#![cfg_attr(not(bootstrap), allow(internal_features))]
|
||||||
|
#![cfg_attr(not(bootstrap), feature(rustdoc_internals))]
|
||||||
#![feature(associated_type_bounds)]
|
#![feature(associated_type_bounds)]
|
||||||
#![feature(box_patterns)]
|
#![feature(box_patterns)]
|
||||||
#![feature(const_trait_impl)]
|
#![feature(const_trait_impl)]
|
||||||
@ -50,14 +53,15 @@ pub mod visit;
|
|||||||
|
|
||||||
pub use self::ast::*;
|
pub use self::ast::*;
|
||||||
pub use self::ast_traits::{AstDeref, AstNodeWrapper, HasAttrs, HasNodeId, HasSpan, HasTokens};
|
pub use self::ast_traits::{AstDeref, AstNodeWrapper, HasAttrs, HasNodeId, HasSpan, HasTokens};
|
||||||
pub use self::format::*;
|
|
||||||
|
|
||||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||||
|
|
||||||
/// Requirements for a `StableHashingContext` to be used in this crate.
|
/// Requirements for a `StableHashingContext` to be used in this crate.
|
||||||
/// This is a hack to allow using the `HashStable_Generic` derive macro
|
/// This is a hack to allow using the `HashStable_Generic` derive macro
|
||||||
/// instead of implementing everything in `rustc_middle`.
|
/// instead of implementing everything in `rustc_middle`.
|
||||||
pub trait HashStableContext: rustc_span::HashStableContext {
|
pub trait HashStableContext:
|
||||||
|
rustc_type_ir::HashStableContext + rustc_span::HashStableContext
|
||||||
|
{
|
||||||
fn hash_attr(&mut self, _: &ast::Attribute, hasher: &mut StableHasher);
|
fn hash_attr(&mut self, _: &ast::Attribute, hasher: &mut StableHasher);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -302,6 +302,10 @@ pub trait MutVisitor: Sized {
|
|||||||
fn visit_format_args(&mut self, fmt: &mut FormatArgs) {
|
fn visit_format_args(&mut self, fmt: &mut FormatArgs) {
|
||||||
noop_visit_format_args(fmt, self)
|
noop_visit_format_args(fmt, self)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn visit_capture_by(&mut self, capture_by: &mut CaptureBy) {
|
||||||
|
noop_visit_capture_by(capture_by, self)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Use a map-style function (`FnOnce(T) -> T`) to overwrite a `&mut T`. Useful
|
/// Use a map-style function (`FnOnce(T) -> T`) to overwrite a `&mut T`. Useful
|
||||||
@ -1397,7 +1401,7 @@ pub fn noop_visit_expr<T: MutVisitor>(
|
|||||||
}
|
}
|
||||||
ExprKind::Closure(box Closure {
|
ExprKind::Closure(box Closure {
|
||||||
binder,
|
binder,
|
||||||
capture_clause: _,
|
capture_clause,
|
||||||
constness,
|
constness,
|
||||||
asyncness,
|
asyncness,
|
||||||
movability: _,
|
movability: _,
|
||||||
@ -1409,6 +1413,7 @@ pub fn noop_visit_expr<T: MutVisitor>(
|
|||||||
vis.visit_closure_binder(binder);
|
vis.visit_closure_binder(binder);
|
||||||
visit_constness(constness, vis);
|
visit_constness(constness, vis);
|
||||||
vis.visit_asyncness(asyncness);
|
vis.visit_asyncness(asyncness);
|
||||||
|
vis.visit_capture_by(capture_clause);
|
||||||
vis.visit_fn_decl(fn_decl);
|
vis.visit_fn_decl(fn_decl);
|
||||||
vis.visit_expr(body);
|
vis.visit_expr(body);
|
||||||
vis.visit_span(fn_decl_span);
|
vis.visit_span(fn_decl_span);
|
||||||
@ -1418,7 +1423,7 @@ pub fn noop_visit_expr<T: MutVisitor>(
|
|||||||
vis.visit_block(blk);
|
vis.visit_block(blk);
|
||||||
visit_opt(label, |label| vis.visit_label(label));
|
visit_opt(label, |label| vis.visit_label(label));
|
||||||
}
|
}
|
||||||
ExprKind::Async(_capture_by, body) => {
|
ExprKind::Gen(_capture_by, body, _) => {
|
||||||
vis.visit_block(body);
|
vis.visit_block(body);
|
||||||
}
|
}
|
||||||
ExprKind::Await(expr, await_kw_span) => {
|
ExprKind::Await(expr, await_kw_span) => {
|
||||||
@ -1562,6 +1567,15 @@ pub fn noop_visit_vis<T: MutVisitor>(visibility: &mut Visibility, vis: &mut T) {
|
|||||||
vis.visit_span(&mut visibility.span);
|
vis.visit_span(&mut visibility.span);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn noop_visit_capture_by<T: MutVisitor>(capture_by: &mut CaptureBy, vis: &mut T) {
|
||||||
|
match capture_by {
|
||||||
|
CaptureBy::Ref => {}
|
||||||
|
CaptureBy::Value { move_kw } => {
|
||||||
|
vis.visit_span(move_kw);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Some value for the AST node that is valid but possibly meaningless.
|
/// Some value for the AST node that is valid but possibly meaningless.
|
||||||
pub trait DummyAstNode {
|
pub trait DummyAstNode {
|
||||||
fn dummy() -> Self;
|
fn dummy() -> Self;
|
||||||
|
|||||||
@ -107,13 +107,11 @@ impl Lit {
|
|||||||
/// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
|
/// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
|
||||||
pub fn from_token(token: &Token) -> Option<Lit> {
|
pub fn from_token(token: &Token) -> Option<Lit> {
|
||||||
match token.uninterpolate().kind {
|
match token.uninterpolate().kind {
|
||||||
Ident(name, false) if name.is_bool_lit() => {
|
Ident(name, false) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
|
||||||
Some(Lit::new(Bool, name, None))
|
|
||||||
}
|
|
||||||
Literal(token_lit) => Some(token_lit),
|
Literal(token_lit) => Some(token_lit),
|
||||||
Interpolated(ref nt)
|
Interpolated(ref nt)
|
||||||
if let NtExpr(expr) | NtLiteral(expr) = &**nt
|
if let NtExpr(expr) | NtLiteral(expr) = &**nt
|
||||||
&& let ast::ExprKind::Lit(token_lit) = expr.kind =>
|
&& let ast::ExprKind::Lit(token_lit) = expr.kind =>
|
||||||
{
|
{
|
||||||
Some(token_lit)
|
Some(token_lit)
|
||||||
}
|
}
|
||||||
@ -199,6 +197,7 @@ pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool {
|
|||||||
kw::Continue,
|
kw::Continue,
|
||||||
kw::False,
|
kw::False,
|
||||||
kw::For,
|
kw::For,
|
||||||
|
kw::Gen,
|
||||||
kw::If,
|
kw::If,
|
||||||
kw::Let,
|
kw::Let,
|
||||||
kw::Loop,
|
kw::Loop,
|
||||||
@ -229,35 +228,61 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
|
|||||||
#[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
|
#[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
|
||||||
pub enum TokenKind {
|
pub enum TokenKind {
|
||||||
/* Expression-operator symbols. */
|
/* Expression-operator symbols. */
|
||||||
|
/// `=`
|
||||||
Eq,
|
Eq,
|
||||||
|
/// `<`
|
||||||
Lt,
|
Lt,
|
||||||
|
/// `<=`
|
||||||
Le,
|
Le,
|
||||||
|
/// `==`
|
||||||
EqEq,
|
EqEq,
|
||||||
|
/// `!=`
|
||||||
Ne,
|
Ne,
|
||||||
|
/// `>`
|
||||||
Ge,
|
Ge,
|
||||||
|
/// `>=`
|
||||||
Gt,
|
Gt,
|
||||||
|
/// `&&`
|
||||||
AndAnd,
|
AndAnd,
|
||||||
|
/// `||`
|
||||||
OrOr,
|
OrOr,
|
||||||
|
/// `!`
|
||||||
Not,
|
Not,
|
||||||
|
/// `~`
|
||||||
Tilde,
|
Tilde,
|
||||||
BinOp(BinOpToken),
|
BinOp(BinOpToken),
|
||||||
BinOpEq(BinOpToken),
|
BinOpEq(BinOpToken),
|
||||||
|
|
||||||
/* Structural symbols */
|
/* Structural symbols */
|
||||||
|
/// `@`
|
||||||
At,
|
At,
|
||||||
|
/// `.`
|
||||||
Dot,
|
Dot,
|
||||||
|
/// `..`
|
||||||
DotDot,
|
DotDot,
|
||||||
|
/// `...`
|
||||||
DotDotDot,
|
DotDotDot,
|
||||||
|
/// `..=`
|
||||||
DotDotEq,
|
DotDotEq,
|
||||||
|
/// `,`
|
||||||
Comma,
|
Comma,
|
||||||
|
/// `;`
|
||||||
Semi,
|
Semi,
|
||||||
|
/// `:`
|
||||||
Colon,
|
Colon,
|
||||||
|
/// `::`
|
||||||
ModSep,
|
ModSep,
|
||||||
|
/// `->`
|
||||||
RArrow,
|
RArrow,
|
||||||
|
/// `<-`
|
||||||
LArrow,
|
LArrow,
|
||||||
|
/// `=>`
|
||||||
FatArrow,
|
FatArrow,
|
||||||
|
/// `#`
|
||||||
Pound,
|
Pound,
|
||||||
|
/// `$`
|
||||||
Dollar,
|
Dollar,
|
||||||
|
/// `?`
|
||||||
Question,
|
Question,
|
||||||
/// Used by proc macros for representing lifetimes, not generated by lexer right now.
|
/// Used by proc macros for representing lifetimes, not generated by lexer right now.
|
||||||
SingleQuote,
|
SingleQuote,
|
||||||
@ -296,6 +321,7 @@ pub enum TokenKind {
|
|||||||
/// similarly to symbols in string literal tokens.
|
/// similarly to symbols in string literal tokens.
|
||||||
DocComment(CommentKind, ast::AttrStyle, Symbol),
|
DocComment(CommentKind, ast::AttrStyle, Symbol),
|
||||||
|
|
||||||
|
/// End Of File
|
||||||
Eof,
|
Eof,
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -404,7 +430,7 @@ impl Token {
|
|||||||
[DotDot, DotDotDot, DotDotEq].contains(&self.kind)
|
[DotDot, DotDotDot, DotDotEq].contains(&self.kind)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_op(&self) -> bool {
|
pub fn is_punct(&self) -> bool {
|
||||||
match self.kind {
|
match self.kind {
|
||||||
Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | BinOp(_)
|
Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | BinOp(_)
|
||||||
| BinOpEq(_) | At | Dot | DotDot | DotDotDot | DotDotEq | Comma | Semi | Colon
|
| BinOpEq(_) | At | Dot | DotDot | DotDotDot | DotDotEq | Comma | Semi | Colon
|
||||||
@ -446,7 +472,7 @@ impl Token {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns `true` if the token can appear at the start of an pattern.
|
/// Returns `true` if the token can appear at the start of a pattern.
|
||||||
///
|
///
|
||||||
/// Shamelessly borrowed from `can_begin_expr`, only used for diagnostics right now.
|
/// Shamelessly borrowed from `can_begin_expr`, only used for diagnostics right now.
|
||||||
pub fn can_begin_pattern(&self) -> bool {
|
pub fn can_begin_pattern(&self) -> bool {
|
||||||
@ -628,7 +654,9 @@ impl Token {
|
|||||||
|
|
||||||
/// Returns `true` if the token is an interpolated path.
|
/// Returns `true` if the token is an interpolated path.
|
||||||
fn is_path(&self) -> bool {
|
fn is_path(&self) -> bool {
|
||||||
if let Interpolated(nt) = &self.kind && let NtPath(..) = **nt {
|
if let Interpolated(nt) = &self.kind
|
||||||
|
&& let NtPath(..) = **nt
|
||||||
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -650,7 +678,9 @@ impl Token {
|
|||||||
|
|
||||||
/// Is the token an interpolated block (`$b:block`)?
|
/// Is the token an interpolated block (`$b:block`)?
|
||||||
pub fn is_whole_block(&self) -> bool {
|
pub fn is_whole_block(&self) -> bool {
|
||||||
if let Interpolated(nt) = &self.kind && let NtBlock(..) = **nt {
|
if let Interpolated(nt) = &self.kind
|
||||||
|
&& let NtBlock(..) = **nt
|
||||||
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -550,7 +550,9 @@ impl TokenStream {
|
|||||||
|
|
||||||
let stream_iter = stream.0.iter().cloned();
|
let stream_iter = stream.0.iter().cloned();
|
||||||
|
|
||||||
if let Some(first) = stream.0.first() && Self::try_glue_to_last(vec_mut, first) {
|
if let Some(first) = stream.0.first()
|
||||||
|
&& Self::try_glue_to_last(vec_mut, first)
|
||||||
|
{
|
||||||
// Now skip the first token tree from `stream`.
|
// Now skip the first token tree from `stream`.
|
||||||
vec_mut.extend(stream_iter.skip(1));
|
vec_mut.extend(stream_iter.skip(1));
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@ -46,7 +46,7 @@ pub fn expr_trailing_brace(mut expr: &ast::Expr) -> Option<&ast::Expr> {
|
|||||||
Closure(closure) => {
|
Closure(closure) => {
|
||||||
expr = &closure.body;
|
expr = &closure.body;
|
||||||
}
|
}
|
||||||
Async(..) | Block(..) | ForLoop(..) | If(..) | Loop(..) | Match(..) | Struct(..)
|
Gen(..) | Block(..) | ForLoop(..) | If(..) | Loop(..) | Match(..) | Struct(..)
|
||||||
| TryBlock(..) | While(..) => break Some(expr),
|
| TryBlock(..) | While(..) => break Some(expr),
|
||||||
_ => break None,
|
_ => break None,
|
||||||
}
|
}
|
||||||
|
|||||||
@ -285,7 +285,7 @@ pub enum ExprPrecedence {
|
|||||||
Block,
|
Block,
|
||||||
TryBlock,
|
TryBlock,
|
||||||
Struct,
|
Struct,
|
||||||
Async,
|
Gen,
|
||||||
Await,
|
Await,
|
||||||
Err,
|
Err,
|
||||||
}
|
}
|
||||||
@ -351,7 +351,7 @@ impl ExprPrecedence {
|
|||||||
| ExprPrecedence::ConstBlock
|
| ExprPrecedence::ConstBlock
|
||||||
| ExprPrecedence::Block
|
| ExprPrecedence::Block
|
||||||
| ExprPrecedence::TryBlock
|
| ExprPrecedence::TryBlock
|
||||||
| ExprPrecedence::Async
|
| ExprPrecedence::Gen
|
||||||
| ExprPrecedence::Struct
|
| ExprPrecedence::Struct
|
||||||
| ExprPrecedence::Err => PREC_PAREN,
|
| ExprPrecedence::Err => PREC_PAREN,
|
||||||
}
|
}
|
||||||
|
|||||||
@ -251,6 +251,9 @@ pub trait Visitor<'ast>: Sized {
|
|||||||
fn visit_inline_asm_sym(&mut self, sym: &'ast InlineAsmSym) {
|
fn visit_inline_asm_sym(&mut self, sym: &'ast InlineAsmSym) {
|
||||||
walk_inline_asm_sym(self, sym)
|
walk_inline_asm_sym(self, sym)
|
||||||
}
|
}
|
||||||
|
fn visit_capture_by(&mut self, _capture_by: &'ast CaptureBy) {
|
||||||
|
// Nothing to do
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
@ -857,7 +860,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
|
|||||||
}
|
}
|
||||||
ExprKind::Closure(box Closure {
|
ExprKind::Closure(box Closure {
|
||||||
binder,
|
binder,
|
||||||
capture_clause: _,
|
capture_clause,
|
||||||
asyncness: _,
|
asyncness: _,
|
||||||
constness: _,
|
constness: _,
|
||||||
movability: _,
|
movability: _,
|
||||||
@ -866,13 +869,14 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
|
|||||||
fn_decl_span: _,
|
fn_decl_span: _,
|
||||||
fn_arg_span: _,
|
fn_arg_span: _,
|
||||||
}) => {
|
}) => {
|
||||||
|
visitor.visit_capture_by(capture_clause);
|
||||||
visitor.visit_fn(FnKind::Closure(binder, fn_decl, body), expression.span, expression.id)
|
visitor.visit_fn(FnKind::Closure(binder, fn_decl, body), expression.span, expression.id)
|
||||||
}
|
}
|
||||||
ExprKind::Block(block, opt_label) => {
|
ExprKind::Block(block, opt_label) => {
|
||||||
walk_list!(visitor, visit_label, opt_label);
|
walk_list!(visitor, visit_label, opt_label);
|
||||||
visitor.visit_block(block);
|
visitor.visit_block(block);
|
||||||
}
|
}
|
||||||
ExprKind::Async(_, body) => {
|
ExprKind::Gen(_, body, _) => {
|
||||||
visitor.visit_block(body);
|
visitor.visit_block(body);
|
||||||
}
|
}
|
||||||
ExprKind::Await(expr, _) => visitor.visit_expr(expr),
|
ExprKind::Await(expr, _) => visitor.visit_expr(expr),
|
||||||
|
|||||||
@ -7,18 +7,20 @@ edition = "2021"
|
|||||||
doctest = false
|
doctest = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
# tidy-alphabetical-start
|
||||||
rustc_ast = { path = "../rustc_ast" }
|
rustc_ast = { path = "../rustc_ast" }
|
||||||
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
|
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
|
||||||
rustc_data_structures = { path = "../rustc_data_structures" }
|
rustc_data_structures = { path = "../rustc_data_structures" }
|
||||||
rustc_errors = { path = "../rustc_errors" }
|
rustc_errors = { path = "../rustc_errors" }
|
||||||
rustc_hir = { path = "../rustc_hir" }
|
|
||||||
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
|
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
|
||||||
|
rustc_hir = { path = "../rustc_hir" }
|
||||||
rustc_index = { path = "../rustc_index" }
|
rustc_index = { path = "../rustc_index" }
|
||||||
rustc_middle = { path = "../rustc_middle" }
|
|
||||||
rustc_macros = { path = "../rustc_macros" }
|
rustc_macros = { path = "../rustc_macros" }
|
||||||
|
rustc_middle = { path = "../rustc_middle" }
|
||||||
rustc_session = { path = "../rustc_session" }
|
rustc_session = { path = "../rustc_session" }
|
||||||
rustc_span = { path = "../rustc_span" }
|
rustc_span = { path = "../rustc_span" }
|
||||||
rustc_target = { path = "../rustc_target" }
|
rustc_target = { path = "../rustc_target" }
|
||||||
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
|
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
|
||||||
thin-vec = "0.2.12"
|
thin-vec = "0.2.12"
|
||||||
tracing = "0.1"
|
tracing = "0.1"
|
||||||
|
# tidy-alphabetical-end
|
||||||
|
|||||||
@ -11,8 +11,8 @@ ast_lowering_argument = argument
|
|||||||
ast_lowering_assoc_ty_parentheses =
|
ast_lowering_assoc_ty_parentheses =
|
||||||
parenthesized generic arguments cannot be used in associated type constraints
|
parenthesized generic arguments cannot be used in associated type constraints
|
||||||
|
|
||||||
ast_lowering_async_generators_not_supported =
|
ast_lowering_async_coroutines_not_supported =
|
||||||
`async` generators are not yet supported
|
`async` coroutines are not yet supported
|
||||||
|
|
||||||
ast_lowering_async_non_move_closure_not_supported =
|
ast_lowering_async_non_move_closure_not_supported =
|
||||||
`async` non-`move` closures with parameters are not currently supported
|
`async` non-`move` closures with parameters are not currently supported
|
||||||
@ -42,6 +42,9 @@ ast_lowering_clobber_abi_not_supported =
|
|||||||
|
|
||||||
ast_lowering_closure_cannot_be_static = closures cannot be static
|
ast_lowering_closure_cannot_be_static = closures cannot be static
|
||||||
|
|
||||||
|
ast_lowering_coroutine_too_many_parameters =
|
||||||
|
too many parameters for a coroutine (expected 0 or 1 parameters)
|
||||||
|
|
||||||
ast_lowering_does_not_support_modifiers =
|
ast_lowering_does_not_support_modifiers =
|
||||||
the `{$class_name}` register class does not support template modifiers
|
the `{$class_name}` register class does not support template modifiers
|
||||||
|
|
||||||
@ -53,9 +56,6 @@ ast_lowering_functional_record_update_destructuring_assignment =
|
|||||||
functional record updates are not allowed in destructuring assignments
|
functional record updates are not allowed in destructuring assignments
|
||||||
.suggestion = consider removing the trailing pattern
|
.suggestion = consider removing the trailing pattern
|
||||||
|
|
||||||
ast_lowering_generator_too_many_parameters =
|
|
||||||
too many parameters for a generator (expected 0 or 1 parameters)
|
|
||||||
|
|
||||||
ast_lowering_generic_type_with_parentheses =
|
ast_lowering_generic_type_with_parentheses =
|
||||||
parenthesized type parameters may only be used with a `Fn` trait
|
parenthesized type parameters may only be used with a `Fn` trait
|
||||||
.label = only `Fn` traits may use parentheses
|
.label = only `Fn` traits may use parentheses
|
||||||
@ -99,7 +99,7 @@ ast_lowering_misplaced_double_dot =
|
|||||||
.note = only allowed in tuple, tuple struct, and slice patterns
|
.note = only allowed in tuple, tuple struct, and slice patterns
|
||||||
|
|
||||||
ast_lowering_misplaced_impl_trait =
|
ast_lowering_misplaced_impl_trait =
|
||||||
`impl Trait` only allowed in function and inherent method return types, not in {$position}
|
`impl Trait` only allowed in function and inherent method argument and return types, not in {$position}
|
||||||
|
|
||||||
ast_lowering_misplaced_relax_trait_bound =
|
ast_lowering_misplaced_relax_trait_bound =
|
||||||
`?Trait` bounds are only permitted at the point where a type parameter is declared
|
`?Trait` bounds are only permitted at the point where a type parameter is declared
|
||||||
@ -136,12 +136,6 @@ ast_lowering_template_modifier = template modifier
|
|||||||
|
|
||||||
ast_lowering_this_not_async = this is not `async`
|
ast_lowering_this_not_async = this is not `async`
|
||||||
|
|
||||||
ast_lowering_trait_fn_async =
|
|
||||||
functions in traits cannot be declared `async`
|
|
||||||
.label = `async` because of this
|
|
||||||
.note = `async` trait functions are not currently supported
|
|
||||||
.note2 = consider using the `async-trait` crate: https://crates.io/crates/async-trait
|
|
||||||
|
|
||||||
ast_lowering_underscore_expr_lhs_assign =
|
ast_lowering_underscore_expr_lhs_assign =
|
||||||
in expressions, `_` can only be used on the left-hand side of an assignment
|
in expressions, `_` can only be used on the left-hand side of an assignment
|
||||||
.label = `_` not allowed here
|
.label = `_` not allowed here
|
||||||
|
|||||||
@ -131,8 +131,8 @@ pub struct AwaitOnlyInAsyncFnAndBlocks {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Diagnostic, Clone, Copy)]
|
#[derive(Diagnostic, Clone, Copy)]
|
||||||
#[diag(ast_lowering_generator_too_many_parameters, code = "E0628")]
|
#[diag(ast_lowering_coroutine_too_many_parameters, code = "E0628")]
|
||||||
pub struct GeneratorTooManyParameters {
|
pub struct CoroutineTooManyParameters {
|
||||||
#[primary_span]
|
#[primary_span]
|
||||||
pub fn_decl_span: Span,
|
pub fn_decl_span: Span,
|
||||||
}
|
}
|
||||||
@ -161,8 +161,8 @@ pub struct FunctionalRecordUpdateDestructuringAssignment {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Diagnostic, Clone, Copy)]
|
#[derive(Diagnostic, Clone, Copy)]
|
||||||
#[diag(ast_lowering_async_generators_not_supported, code = "E0727")]
|
#[diag(ast_lowering_async_coroutines_not_supported, code = "E0727")]
|
||||||
pub struct AsyncGeneratorsNotSupported {
|
pub struct AsyncCoroutinesNotSupported {
|
||||||
#[primary_span]
|
#[primary_span]
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
}
|
}
|
||||||
@ -354,17 +354,6 @@ pub struct InclusiveRangeWithNoEnd {
|
|||||||
pub span: Span,
|
pub span: Span,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Diagnostic, Clone, Copy)]
|
|
||||||
#[diag(ast_lowering_trait_fn_async, code = "E0706")]
|
|
||||||
#[note]
|
|
||||||
#[note(ast_lowering_note2)]
|
|
||||||
pub struct TraitFnAsync {
|
|
||||||
#[primary_span]
|
|
||||||
pub fn_span: Span,
|
|
||||||
#[label]
|
|
||||||
pub span: Span,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Diagnostic)]
|
#[derive(Diagnostic)]
|
||||||
pub enum BadReturnTypeNotation {
|
pub enum BadReturnTypeNotation {
|
||||||
#[diag(ast_lowering_bad_return_type_notation_inputs)]
|
#[diag(ast_lowering_bad_return_type_notation_inputs)]
|
||||||
|
|||||||
@ -1,8 +1,8 @@
|
|||||||
use super::errors::{
|
use super::errors::{
|
||||||
AsyncGeneratorsNotSupported, AsyncNonMoveClosureNotSupported, AwaitOnlyInAsyncFnAndBlocks,
|
AsyncCoroutinesNotSupported, AsyncNonMoveClosureNotSupported, AwaitOnlyInAsyncFnAndBlocks,
|
||||||
BaseExpressionDoubleDot, ClosureCannotBeStatic, FunctionalRecordUpdateDestructuringAssignment,
|
BaseExpressionDoubleDot, ClosureCannotBeStatic, CoroutineTooManyParameters,
|
||||||
GeneratorTooManyParameters, InclusiveRangeWithNoEnd, NotSupportedForLifetimeBinderAsyncClosure,
|
FunctionalRecordUpdateDestructuringAssignment, InclusiveRangeWithNoEnd,
|
||||||
UnderscoreExprLhsAssign,
|
NotSupportedForLifetimeBinderAsyncClosure, UnderscoreExprLhsAssign,
|
||||||
};
|
};
|
||||||
use super::ResolverAstLoweringExt;
|
use super::ResolverAstLoweringExt;
|
||||||
use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
|
use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
|
||||||
@ -15,9 +15,10 @@ use rustc_hir as hir;
|
|||||||
use rustc_hir::def::Res;
|
use rustc_hir::def::Res;
|
||||||
use rustc_hir::definitions::DefPathData;
|
use rustc_hir::definitions::DefPathData;
|
||||||
use rustc_session::errors::report_lit_error;
|
use rustc_session::errors::report_lit_error;
|
||||||
use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
|
use rustc_span::source_map::{respan, Spanned};
|
||||||
use rustc_span::symbol::{sym, Ident, Symbol};
|
use rustc_span::symbol::{sym, Ident, Symbol};
|
||||||
use rustc_span::DUMMY_SP;
|
use rustc_span::DUMMY_SP;
|
||||||
|
use rustc_span::{DesugaringKind, Span};
|
||||||
use thin_vec::{thin_vec, ThinVec};
|
use thin_vec::{thin_vec, ThinVec};
|
||||||
|
|
||||||
impl<'hir> LoweringContext<'_, 'hir> {
|
impl<'hir> LoweringContext<'_, 'hir> {
|
||||||
@ -183,12 +184,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
|
self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
|
||||||
hir::MatchSource::Normal,
|
hir::MatchSource::Normal,
|
||||||
),
|
),
|
||||||
ExprKind::Async(capture_clause, block) => self.make_async_expr(
|
ExprKind::Gen(capture_clause, block, GenBlockKind::Async) => self.make_async_expr(
|
||||||
*capture_clause,
|
*capture_clause,
|
||||||
e.id,
|
e.id,
|
||||||
None,
|
None,
|
||||||
e.span,
|
e.span,
|
||||||
hir::AsyncGeneratorKind::Block,
|
hir::CoroutineSource::Block,
|
||||||
|this| this.with_new_scopes(|this| this.lower_block_expr(block)),
|
|this| this.with_new_scopes(|this| this.lower_block_expr(block)),
|
||||||
),
|
),
|
||||||
ExprKind::Await(expr, await_kw_span) => self.lower_expr_await(*await_kw_span, expr),
|
ExprKind::Await(expr, await_kw_span) => self.lower_expr_await(*await_kw_span, expr),
|
||||||
@ -317,6 +318,14 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
rest,
|
rest,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
ExprKind::Gen(capture_clause, block, GenBlockKind::Gen) => self.make_gen_expr(
|
||||||
|
*capture_clause,
|
||||||
|
e.id,
|
||||||
|
None,
|
||||||
|
e.span,
|
||||||
|
hir::CoroutineSource::Block,
|
||||||
|
|this| this.with_new_scopes(|this| this.lower_block_expr(block)),
|
||||||
|
),
|
||||||
ExprKind::Yield(opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
|
ExprKind::Yield(opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
|
||||||
ExprKind::Err => hir::ExprKind::Err(
|
ExprKind::Err => hir::ExprKind::Err(
|
||||||
self.tcx.sess.delay_span_bug(e.span, "lowered ExprKind::Err"),
|
self.tcx.sess.delay_span_bug(e.span, "lowered ExprKind::Err"),
|
||||||
@ -583,7 +592,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Lower an `async` construct to a generator that implements `Future`.
|
/// Lower an `async` construct to a coroutine that implements `Future`.
|
||||||
///
|
///
|
||||||
/// This results in:
|
/// This results in:
|
||||||
///
|
///
|
||||||
@ -598,7 +607,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
closure_node_id: NodeId,
|
closure_node_id: NodeId,
|
||||||
ret_ty: Option<hir::FnRetTy<'hir>>,
|
ret_ty: Option<hir::FnRetTy<'hir>>,
|
||||||
span: Span,
|
span: Span,
|
||||||
async_gen_kind: hir::AsyncGeneratorKind,
|
async_coroutine_source: hir::CoroutineSource,
|
||||||
body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
|
body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
|
||||||
) -> hir::ExprKind<'hir> {
|
) -> hir::ExprKind<'hir> {
|
||||||
let output = ret_ty.unwrap_or_else(|| hir::FnRetTy::DefaultReturn(self.lower_span(span)));
|
let output = ret_ty.unwrap_or_else(|| hir::FnRetTy::DefaultReturn(self.lower_span(span)));
|
||||||
@ -613,7 +622,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
span: unstable_span,
|
span: unstable_span,
|
||||||
};
|
};
|
||||||
|
|
||||||
// The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
|
// The closure/coroutine `FnDecl` takes a single (resume) argument of type `input_ty`.
|
||||||
let fn_decl = self.arena.alloc(hir::FnDecl {
|
let fn_decl = self.arena.alloc(hir::FnDecl {
|
||||||
inputs: arena_vec![self; input_ty],
|
inputs: arena_vec![self; input_ty],
|
||||||
output,
|
output,
|
||||||
@ -637,7 +646,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
let params = arena_vec![self; param];
|
let params = arena_vec![self; param];
|
||||||
|
|
||||||
let body = self.lower_body(move |this| {
|
let body = self.lower_body(move |this| {
|
||||||
this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
|
this.coroutine_kind = Some(hir::CoroutineKind::Async(async_coroutine_source));
|
||||||
|
|
||||||
let old_ctx = this.task_context;
|
let old_ctx = this.task_context;
|
||||||
this.task_context = Some(task_context_hid);
|
this.task_context = Some(task_context_hid);
|
||||||
@ -661,6 +670,57 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Lower a `gen` construct to a generator that implements `Iterator`.
|
||||||
|
///
|
||||||
|
/// This results in:
|
||||||
|
///
|
||||||
|
/// ```text
|
||||||
|
/// static move? |()| -> () {
|
||||||
|
/// <body>
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
pub(super) fn make_gen_expr(
|
||||||
|
&mut self,
|
||||||
|
capture_clause: CaptureBy,
|
||||||
|
closure_node_id: NodeId,
|
||||||
|
_yield_ty: Option<hir::FnRetTy<'hir>>,
|
||||||
|
span: Span,
|
||||||
|
coroutine_source: hir::CoroutineSource,
|
||||||
|
body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
|
||||||
|
) -> hir::ExprKind<'hir> {
|
||||||
|
let output = hir::FnRetTy::DefaultReturn(self.lower_span(span));
|
||||||
|
|
||||||
|
// The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
|
||||||
|
let fn_decl = self.arena.alloc(hir::FnDecl {
|
||||||
|
inputs: &[],
|
||||||
|
output,
|
||||||
|
c_variadic: false,
|
||||||
|
implicit_self: hir::ImplicitSelfKind::None,
|
||||||
|
lifetime_elision_allowed: false,
|
||||||
|
});
|
||||||
|
|
||||||
|
let body = self.lower_body(move |this| {
|
||||||
|
this.coroutine_kind = Some(hir::CoroutineKind::Gen(coroutine_source));
|
||||||
|
|
||||||
|
let res = body(this);
|
||||||
|
(&[], res)
|
||||||
|
});
|
||||||
|
|
||||||
|
// `static |()| -> () { body }`:
|
||||||
|
hir::ExprKind::Closure(self.arena.alloc(hir::Closure {
|
||||||
|
def_id: self.local_def_id(closure_node_id),
|
||||||
|
binder: hir::ClosureBinder::Default,
|
||||||
|
capture_clause,
|
||||||
|
bound_generic_params: &[],
|
||||||
|
fn_decl,
|
||||||
|
body,
|
||||||
|
fn_decl_span: self.lower_span(span),
|
||||||
|
fn_arg_span: None,
|
||||||
|
movability: Some(Movability::Movable),
|
||||||
|
constness: hir::Constness::NotConst,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
/// Forwards a possible `#[track_caller]` annotation from `outer_hir_id` to
|
/// Forwards a possible `#[track_caller]` annotation from `outer_hir_id` to
|
||||||
/// `inner_hir_id` in case the `async_fn_track_caller` feature is enabled.
|
/// `inner_hir_id` in case the `async_fn_track_caller` feature is enabled.
|
||||||
pub(super) fn maybe_forward_track_caller(
|
pub(super) fn maybe_forward_track_caller(
|
||||||
@ -673,12 +733,18 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
&& let Some(attrs) = self.attrs.get(&outer_hir_id.local_id)
|
&& let Some(attrs) = self.attrs.get(&outer_hir_id.local_id)
|
||||||
&& attrs.into_iter().any(|attr| attr.has_name(sym::track_caller))
|
&& attrs.into_iter().any(|attr| attr.has_name(sym::track_caller))
|
||||||
{
|
{
|
||||||
let unstable_span =
|
let unstable_span = self.mark_span_with_reason(
|
||||||
self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
|
DesugaringKind::Async,
|
||||||
|
span,
|
||||||
|
self.allow_gen_future.clone(),
|
||||||
|
);
|
||||||
self.lower_attrs(
|
self.lower_attrs(
|
||||||
inner_hir_id,
|
inner_hir_id,
|
||||||
&[Attribute {
|
&[Attribute {
|
||||||
kind: AttrKind::Normal(ptr::P(NormalAttr::from_ident(Ident::new(sym::track_caller, span)))),
|
kind: AttrKind::Normal(ptr::P(NormalAttr::from_ident(Ident::new(
|
||||||
|
sym::track_caller,
|
||||||
|
span,
|
||||||
|
)))),
|
||||||
id: self.tcx.sess.parse_sess.attr_id_generator.mk_attr_id(),
|
id: self.tcx.sess.parse_sess.attr_id_generator.mk_attr_id(),
|
||||||
style: AttrStyle::Outer,
|
style: AttrStyle::Outer,
|
||||||
span: unstable_span,
|
span: unstable_span,
|
||||||
@ -704,9 +770,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
/// ```
|
/// ```
|
||||||
fn lower_expr_await(&mut self, await_kw_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
|
fn lower_expr_await(&mut self, await_kw_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
|
||||||
let full_span = expr.span.to(await_kw_span);
|
let full_span = expr.span.to(await_kw_span);
|
||||||
match self.generator_kind {
|
match self.coroutine_kind {
|
||||||
Some(hir::GeneratorKind::Async(_)) => {}
|
Some(hir::CoroutineKind::Async(_)) => {}
|
||||||
Some(hir::GeneratorKind::Gen) | None => {
|
Some(hir::CoroutineKind::Coroutine) | Some(hir::CoroutineKind::Gen(_)) | None => {
|
||||||
self.tcx.sess.emit_err(AwaitOnlyInAsyncFnAndBlocks {
|
self.tcx.sess.emit_err(AwaitOnlyInAsyncFnAndBlocks {
|
||||||
await_kw_span,
|
await_kw_span,
|
||||||
item_span: self.current_item,
|
item_span: self.current_item,
|
||||||
@ -881,19 +947,19 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
) -> hir::ExprKind<'hir> {
|
) -> hir::ExprKind<'hir> {
|
||||||
let (binder_clause, generic_params) = self.lower_closure_binder(binder);
|
let (binder_clause, generic_params) = self.lower_closure_binder(binder);
|
||||||
|
|
||||||
let (body_id, generator_option) = self.with_new_scopes(move |this| {
|
let (body_id, coroutine_option) = self.with_new_scopes(move |this| {
|
||||||
let prev = this.current_item;
|
let prev = this.current_item;
|
||||||
this.current_item = Some(fn_decl_span);
|
this.current_item = Some(fn_decl_span);
|
||||||
let mut generator_kind = None;
|
let mut coroutine_kind = None;
|
||||||
let body_id = this.lower_fn_body(decl, |this| {
|
let body_id = this.lower_fn_body(decl, |this| {
|
||||||
let e = this.lower_expr_mut(body);
|
let e = this.lower_expr_mut(body);
|
||||||
generator_kind = this.generator_kind;
|
coroutine_kind = this.coroutine_kind;
|
||||||
e
|
e
|
||||||
});
|
});
|
||||||
let generator_option =
|
let coroutine_option =
|
||||||
this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
|
this.coroutine_movability_for_fn(&decl, fn_decl_span, coroutine_kind, movability);
|
||||||
this.current_item = prev;
|
this.current_item = prev;
|
||||||
(body_id, generator_option)
|
(body_id, coroutine_option)
|
||||||
});
|
});
|
||||||
|
|
||||||
let bound_generic_params = self.lower_lifetime_binder(closure_id, generic_params);
|
let bound_generic_params = self.lower_lifetime_binder(closure_id, generic_params);
|
||||||
@ -909,29 +975,29 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
body: body_id,
|
body: body_id,
|
||||||
fn_decl_span: self.lower_span(fn_decl_span),
|
fn_decl_span: self.lower_span(fn_decl_span),
|
||||||
fn_arg_span: Some(self.lower_span(fn_arg_span)),
|
fn_arg_span: Some(self.lower_span(fn_arg_span)),
|
||||||
movability: generator_option,
|
movability: coroutine_option,
|
||||||
constness: self.lower_constness(constness),
|
constness: self.lower_constness(constness),
|
||||||
});
|
});
|
||||||
|
|
||||||
hir::ExprKind::Closure(c)
|
hir::ExprKind::Closure(c)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn generator_movability_for_fn(
|
fn coroutine_movability_for_fn(
|
||||||
&mut self,
|
&mut self,
|
||||||
decl: &FnDecl,
|
decl: &FnDecl,
|
||||||
fn_decl_span: Span,
|
fn_decl_span: Span,
|
||||||
generator_kind: Option<hir::GeneratorKind>,
|
coroutine_kind: Option<hir::CoroutineKind>,
|
||||||
movability: Movability,
|
movability: Movability,
|
||||||
) -> Option<hir::Movability> {
|
) -> Option<hir::Movability> {
|
||||||
match generator_kind {
|
match coroutine_kind {
|
||||||
Some(hir::GeneratorKind::Gen) => {
|
Some(hir::CoroutineKind::Coroutine) => {
|
||||||
if decl.inputs.len() > 1 {
|
if decl.inputs.len() > 1 {
|
||||||
self.tcx.sess.emit_err(GeneratorTooManyParameters { fn_decl_span });
|
self.tcx.sess.emit_err(CoroutineTooManyParameters { fn_decl_span });
|
||||||
}
|
}
|
||||||
Some(movability)
|
Some(movability)
|
||||||
}
|
}
|
||||||
Some(hir::GeneratorKind::Async(_)) => {
|
Some(hir::CoroutineKind::Gen(_)) | Some(hir::CoroutineKind::Async(_)) => {
|
||||||
panic!("non-`async` closure body turned `async` during lowering");
|
panic!("non-`async`/`gen` closure body turned `async`/`gen` during lowering");
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
if movability == Movability::Static {
|
if movability == Movability::Static {
|
||||||
@ -999,7 +1065,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
inner_closure_id,
|
inner_closure_id,
|
||||||
async_ret_ty,
|
async_ret_ty,
|
||||||
body.span,
|
body.span,
|
||||||
hir::AsyncGeneratorKind::Closure,
|
hir::CoroutineSource::Closure,
|
||||||
|this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
|
|this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
|
||||||
);
|
);
|
||||||
let hir_id = this.lower_node_id(inner_closure_id);
|
let hir_id = this.lower_node_id(inner_closure_id);
|
||||||
@@ -1102,7 +1168,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
if let ExprKind::Path(qself, path) = &expr.kind {
// Does the path resolve to something disallowed in a tuple struct/variant pattern?
if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
- if let Some(res) = partial_res.full_res() && !res.expected_in_tuple_struct_pat() {
+ if let Some(res) = partial_res.full_res()
+ && !res.expected_in_tuple_struct_pat()
+ {
return None;
}
}
@@ -1122,7 +1190,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
if let ExprKind::Path(qself, path) = &expr.kind {
// Does the path resolve to something disallowed in a unit struct/variant pattern?
if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
- if let Some(res) = partial_res.full_res() && !res.expected_in_unit_struct_pat() {
+ if let Some(res) = partial_res.full_res()
+ && !res.expected_in_unit_struct_pat()
+ {
return None;
}
}
@@ -1434,12 +1504,23 @@ impl<'hir> LoweringContext<'_, 'hir> {
}

fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
- match self.generator_kind {
+ match self.coroutine_kind {
- Some(hir::GeneratorKind::Gen) => {}
+ Some(hir::CoroutineKind::Gen(_)) => {}
- Some(hir::GeneratorKind::Async(_)) => {
+ Some(hir::CoroutineKind::Async(_)) => {
- self.tcx.sess.emit_err(AsyncGeneratorsNotSupported { span });
+ self.tcx.sess.emit_err(AsyncCoroutinesNotSupported { span });
+ }
+ Some(hir::CoroutineKind::Coroutine) | None => {
+ if !self.tcx.features().coroutines {
+ rustc_session::parse::feature_err(
+ &self.tcx.sess.parse_sess,
+ sym::coroutines,
+ span,
+ "yield syntax is experimental",
+ )
+ .emit();
+ }
+ self.coroutine_kind = Some(hir::CoroutineKind::Coroutine)
}
- None => self.generator_kind = Some(hir::GeneratorKind::Gen),
}

let expr =
@@ -61,9 +61,12 @@ fn flatten_format_args(mut fmt: Cow<'_, FormatArgs>) -> Cow<'_, FormatArgs> {
let remaining_args = args.split_off(arg_index + 1);
let old_arg_offset = args.len();
let mut fmt2 = &mut args.pop().unwrap().expr; // The inner FormatArgs.
- let fmt2 = loop { // Unwrap the Expr to get to the FormatArgs.
+ let fmt2 = loop {
+ // Unwrap the Expr to get to the FormatArgs.
match &mut fmt2.kind {
- ExprKind::Paren(inner) | ExprKind::AddrOf(BorrowKind::Ref, _, inner) => fmt2 = inner,
+ ExprKind::Paren(inner) | ExprKind::AddrOf(BorrowKind::Ref, _, inner) => {
+ fmt2 = inner
+ }
ExprKind::FormatArgs(fmt2) => break fmt2,
_ => unreachable!(),
}
@ -410,15 +413,11 @@ fn expand_format_args<'hir>(
|
|||||||
let format_options = use_format_options.then(|| {
|
let format_options = use_format_options.then(|| {
|
||||||
// Generate:
|
// Generate:
|
||||||
// &[format_spec_0, format_spec_1, format_spec_2]
|
// &[format_spec_0, format_spec_1, format_spec_2]
|
||||||
let elements: Vec<_> = fmt
|
let elements = ctx.arena.alloc_from_iter(fmt.template.iter().filter_map(|piece| {
|
||||||
.template
|
let FormatArgsPiece::Placeholder(placeholder) = piece else { return None };
|
||||||
.iter()
|
Some(make_format_spec(ctx, macsp, placeholder, &mut argmap))
|
||||||
.filter_map(|piece| {
|
}));
|
||||||
let FormatArgsPiece::Placeholder(placeholder) = piece else { return None };
|
ctx.expr_array_ref(macsp, elements)
|
||||||
Some(make_format_spec(ctx, macsp, placeholder, &mut argmap))
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
ctx.expr_array_ref(macsp, ctx.arena.alloc_from_iter(elements))
|
|
||||||
});
|
});
|
||||||
|
|
||||||
let arguments = fmt.arguments.all_args();
|
let arguments = fmt.arguments.all_args();
|
||||||
@ -477,10 +476,8 @@ fn expand_format_args<'hir>(
|
|||||||
// <core::fmt::Argument>::new_debug(&arg2),
|
// <core::fmt::Argument>::new_debug(&arg2),
|
||||||
// …
|
// …
|
||||||
// ]
|
// ]
|
||||||
let elements: Vec<_> = arguments
|
let elements = ctx.arena.alloc_from_iter(arguments.iter().zip(argmap).map(
|
||||||
.iter()
|
|(arg, ((_, ty), placeholder_span))| {
|
||||||
.zip(argmap)
|
|
||||||
.map(|(arg, ((_, ty), placeholder_span))| {
|
|
||||||
let placeholder_span =
|
let placeholder_span =
|
||||||
placeholder_span.unwrap_or(arg.expr.span).with_ctxt(macsp.ctxt());
|
placeholder_span.unwrap_or(arg.expr.span).with_ctxt(macsp.ctxt());
|
||||||
let arg_span = match arg.kind {
|
let arg_span = match arg.kind {
|
||||||
@ -493,9 +490,9 @@ fn expand_format_args<'hir>(
|
|||||||
hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, arg),
|
hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, arg),
|
||||||
));
|
));
|
||||||
make_argument(ctx, placeholder_span, ref_arg, ty)
|
make_argument(ctx, placeholder_span, ref_arg, ty)
|
||||||
})
|
},
|
||||||
.collect();
|
));
|
||||||
ctx.expr_array_ref(macsp, ctx.arena.alloc_from_iter(elements))
|
ctx.expr_array_ref(macsp, elements)
|
||||||
} else {
|
} else {
|
||||||
// Generate:
|
// Generate:
|
||||||
// &match (&arg0, &arg1, &…) {
|
// &match (&arg0, &arg1, &…) {
|
||||||
@ -528,19 +525,14 @@ fn expand_format_args<'hir>(
|
|||||||
make_argument(ctx, placeholder_span, arg, ty)
|
make_argument(ctx, placeholder_span, arg, ty)
|
||||||
},
|
},
|
||||||
));
|
));
|
||||||
let elements: Vec<_> = arguments
|
let elements = ctx.arena.alloc_from_iter(arguments.iter().map(|arg| {
|
||||||
.iter()
|
let arg_expr = ctx.lower_expr(&arg.expr);
|
||||||
.map(|arg| {
|
ctx.expr(
|
||||||
let arg_expr = ctx.lower_expr(&arg.expr);
|
arg.expr.span.with_ctxt(macsp.ctxt()),
|
||||||
ctx.expr(
|
hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, arg_expr),
|
||||||
arg.expr.span.with_ctxt(macsp.ctxt()),
|
)
|
||||||
hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, arg_expr),
|
}));
|
||||||
)
|
let args_tuple = ctx.arena.alloc(ctx.expr(macsp, hir::ExprKind::Tup(elements)));
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
let args_tuple = ctx
|
|
||||||
.arena
|
|
||||||
.alloc(ctx.expr(macsp, hir::ExprKind::Tup(ctx.arena.alloc_from_iter(elements))));
|
|
||||||
let array = ctx.arena.alloc(ctx.expr(macsp, hir::ExprKind::Array(args)));
|
let array = ctx.arena.alloc(ctx.expr(macsp, hir::ExprKind::Array(args)));
|
||||||
let match_arms = ctx.arena.alloc_from_iter([ctx.arm(args_pat, array)]);
|
let match_arms = ctx.arena.alloc_from_iter([ctx.arm(args_pat, array)]);
|
||||||
let match_expr = ctx.arena.alloc(ctx.expr_match(
|
let match_expr = ctx.arena.alloc(ctx.expr_match(
|
||||||
|
|||||||
@@ -16,9 +16,8 @@ use rustc_hir::PredicateOrigin;
use rustc_index::{Idx, IndexSlice, IndexVec};
use rustc_middle::ty::{ResolverAstLowering, TyCtxt};
use rustc_span::edit_distance::find_best_match_for_name;
- use rustc_span::source_map::DesugaringKind;
use rustc_span::symbol::{kw, sym, Ident};
- use rustc_span::{Span, Symbol};
+ use rustc_span::{DesugaringKind, Span, Symbol};
use rustc_target::spec::abi;
use smallvec::{smallvec, SmallVec};
use thin_vec::ThinVec;
@@ -82,7 +81,7 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
is_in_loop_condition: false,
is_in_trait_impl: false,
is_in_dyn_type: false,
- generator_kind: None,
+ coroutine_kind: None,
task_context: None,
current_item: None,
impl_trait_defs: Vec::new(),
@@ -974,7 +973,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
value: hir::Expr<'hir>,
) -> hir::BodyId {
let body = hir::Body {
- generator_kind: self.generator_kind,
+ coroutine_kind: self.coroutine_kind,
params,
value: self.arena.alloc(value),
};
@@ -988,12 +987,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
&mut self,
f: impl FnOnce(&mut Self) -> (&'hir [hir::Param<'hir>], hir::Expr<'hir>),
) -> hir::BodyId {
- let prev_gen_kind = self.generator_kind.take();
+ let prev_coroutine_kind = self.coroutine_kind.take();
let task_context = self.task_context.take();
let (parameters, result) = f(self);
let body_id = self.record_body(parameters, result);
self.task_context = task_context;
- self.generator_kind = prev_gen_kind;
+ self.coroutine_kind = prev_coroutine_kind;
body_id
}

@@ -1202,11 +1201,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
}

let async_expr = this.make_async_expr(
- CaptureBy::Value,
+ CaptureBy::Value { move_kw: rustc_span::DUMMY_SP },
closure_id,
None,
body.span,
- hir::AsyncGeneratorKind::Fn,
+ hir::CoroutineSource::Fn,
|this| {
// Create a block from the user's function body:
let user_body = this.lower_block_expr(body);
@ -1387,10 +1386,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
// Desugar `~const` bound in generics into an additional `const host: bool` param
|
// Desugar `~const` bound in generics into an additional `const host: bool` param
|
||||||
// if the effects feature is enabled. This needs to be done before we lower where
|
// if the effects feature is enabled. This needs to be done before we lower where
|
||||||
// clauses since where clauses need to bind to the DefId of the host param
|
// clauses since where clauses need to bind to the DefId of the host param
|
||||||
let host_param_parts = if let Const::Yes(span) = constness && self.tcx.features().effects {
|
let host_param_parts = if let Const::Yes(span) = constness
|
||||||
if let Some(param) = generics.params.iter().find(|x| {
|
&& self.tcx.features().effects
|
||||||
x.attrs.iter().any(|x| x.has_name(sym::rustc_host))
|
{
|
||||||
}) {
|
if let Some(param) =
|
||||||
|
generics.params.iter().find(|x| x.attrs.iter().any(|x| x.has_name(sym::rustc_host)))
|
||||||
|
{
|
||||||
// user has manually specified a `rustc_host` param, in this case, we set
|
// user has manually specified a `rustc_host` param, in this case, we set
|
||||||
// the param id so that lowering logic can use that. But we don't create
|
// the param id so that lowering logic can use that. But we don't create
|
||||||
// another host param, so this gives `None`.
|
// another host param, so this gives `None`.
|
||||||
@ -1399,7 +1400,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
} else {
|
} else {
|
||||||
let param_node_id = self.next_node_id();
|
let param_node_id = self.next_node_id();
|
||||||
let hir_id = self.next_id();
|
let hir_id = self.next_id();
|
||||||
let def_id = self.create_def(self.local_def_id(parent_node_id), param_node_id, DefPathData::TypeNs(sym::host), span);
|
let def_id = self.create_def(
|
||||||
|
self.local_def_id(parent_node_id),
|
||||||
|
param_node_id,
|
||||||
|
DefPathData::TypeNs(sym::host),
|
||||||
|
span,
|
||||||
|
);
|
||||||
self.host_param_id = Some(def_id);
|
self.host_param_id = Some(def_id);
|
||||||
Some((span, hir_id, def_id))
|
Some((span, hir_id, def_id))
|
||||||
}
|
}
|
||||||
@ -1623,12 +1629,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||||||
.lower_generic_params(bound_generic_params, hir::GenericParamSource::Binder),
|
.lower_generic_params(bound_generic_params, hir::GenericParamSource::Binder),
|
||||||
bounded_ty: self
|
bounded_ty: self
|
||||||
.lower_ty(bounded_ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Bound)),
|
.lower_ty(bounded_ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Bound)),
|
||||||
bounds: self.arena.alloc_from_iter(bounds.iter().map(|bound| {
|
bounds: self.lower_param_bounds(
|
||||||
self.lower_param_bound(
|
bounds,
|
||||||
bound,
|
&ImplTraitContext::Disallowed(ImplTraitPosition::Bound),
|
||||||
&ImplTraitContext::Disallowed(ImplTraitPosition::Bound),
|
),
|
||||||
)
|
|
||||||
})),
|
|
||||||
span: self.lower_span(*span),
|
span: self.lower_span(*span),
|
||||||
origin: PredicateOrigin::WhereClause,
|
origin: PredicateOrigin::WhereClause,
|
||||||
}),
|
}),
|
||||||
|
|||||||
@@ -30,6 +30,9 @@
//! get confused if the spans from leaf AST nodes occur in multiple places
//! in the HIR, especially for multiple identifiers.

+ #![cfg_attr(not(bootstrap), allow(internal_features))]
+ #![cfg_attr(not(bootstrap), feature(rustdoc_internals))]
+ #![cfg_attr(not(bootstrap), doc(rust_logo))]
#![feature(box_patterns)]
#![feature(let_chains)]
#![feature(never_type)]
@ -40,7 +43,7 @@
|
|||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate tracing;
|
extern crate tracing;
|
||||||
|
|
||||||
use crate::errors::{AssocTyParentheses, AssocTyParenthesesSub, MisplacedImplTrait, TraitFnAsync};
|
use crate::errors::{AssocTyParentheses, AssocTyParenthesesSub, MisplacedImplTrait};
|
||||||
|
|
||||||
use rustc_ast::ptr::P;
|
use rustc_ast::ptr::P;
|
||||||
use rustc_ast::visit;
|
use rustc_ast::visit;
|
||||||
@ -68,9 +71,8 @@ use rustc_middle::{
|
|||||||
};
|
};
|
||||||
use rustc_session::parse::{add_feature_diagnostics, feature_err};
|
use rustc_session::parse::{add_feature_diagnostics, feature_err};
|
||||||
use rustc_span::hygiene::MacroKind;
|
use rustc_span::hygiene::MacroKind;
|
||||||
use rustc_span::source_map::DesugaringKind;
|
|
||||||
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
||||||
use rustc_span::{Span, DUMMY_SP};
|
use rustc_span::{DesugaringKind, Span, DUMMY_SP};
|
||||||
use smallvec::SmallVec;
|
use smallvec::SmallVec;
|
||||||
use std::collections::hash_map::Entry;
|
use std::collections::hash_map::Entry;
|
||||||
use thin_vec::ThinVec;
|
use thin_vec::ThinVec;
|
||||||
@@ -108,10 +110,10 @@ struct LoweringContext<'a, 'hir> {
/// Collect items that were created by lowering the current owner.
children: Vec<(LocalDefId, hir::MaybeOwner<&'hir hir::OwnerInfo<'hir>>)>,

- generator_kind: Option<hir::GeneratorKind>,
+ coroutine_kind: Option<hir::CoroutineKind>,

/// When inside an `async` context, this is the `HirId` of the
- /// `task_context` local bound to the resume argument of the generator.
+ /// `task_context` local bound to the resume argument of the coroutine.
task_context: Option<hir::HirId>,

/// Used to get the current `fn`'s def span to point to when using `await`
@ -271,8 +273,6 @@ enum ImplTraitPosition {
|
|||||||
ClosureReturn,
|
ClosureReturn,
|
||||||
PointerReturn,
|
PointerReturn,
|
||||||
FnTraitReturn,
|
FnTraitReturn,
|
||||||
TraitReturn,
|
|
||||||
ImplReturn,
|
|
||||||
GenericDefault,
|
GenericDefault,
|
||||||
ConstTy,
|
ConstTy,
|
||||||
StaticTy,
|
StaticTy,
|
||||||
@ -302,8 +302,6 @@ impl std::fmt::Display for ImplTraitPosition {
|
|||||||
ImplTraitPosition::ClosureReturn => "closure return types",
|
ImplTraitPosition::ClosureReturn => "closure return types",
|
||||||
ImplTraitPosition::PointerReturn => "`fn` pointer return types",
|
ImplTraitPosition::PointerReturn => "`fn` pointer return types",
|
||||||
ImplTraitPosition::FnTraitReturn => "`Fn` trait return types",
|
ImplTraitPosition::FnTraitReturn => "`Fn` trait return types",
|
||||||
ImplTraitPosition::TraitReturn => "trait method return types",
|
|
||||||
ImplTraitPosition::ImplReturn => "`impl` method return types",
|
|
||||||
ImplTraitPosition::GenericDefault => "generic parameter defaults",
|
ImplTraitPosition::GenericDefault => "generic parameter defaults",
|
||||||
ImplTraitPosition::ConstTy => "const types",
|
ImplTraitPosition::ConstTy => "const types",
|
||||||
ImplTraitPosition::StaticTy => "static types",
|
ImplTraitPosition::StaticTy => "static types",
|
||||||
@ -334,20 +332,9 @@ impl FnDeclKind {
|
|||||||
matches!(self, FnDeclKind::Fn | FnDeclKind::Inherent | FnDeclKind::Impl | FnDeclKind::Trait)
|
matches!(self, FnDeclKind::Fn | FnDeclKind::Inherent | FnDeclKind::Impl | FnDeclKind::Trait)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn return_impl_trait_allowed(&self, tcx: TyCtxt<'_>) -> bool {
|
fn return_impl_trait_allowed(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
FnDeclKind::Fn | FnDeclKind::Inherent => true,
|
FnDeclKind::Fn | FnDeclKind::Inherent | FnDeclKind::Impl | FnDeclKind::Trait => true,
|
||||||
FnDeclKind::Impl if tcx.features().return_position_impl_trait_in_trait => true,
|
|
||||||
FnDeclKind::Trait if tcx.features().return_position_impl_trait_in_trait => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn async_fn_allowed(&self, tcx: TyCtxt<'_>) -> bool {
|
|
||||||
match self {
|
|
||||||
FnDeclKind::Fn | FnDeclKind::Inherent => true,
|
|
||||||
FnDeclKind::Impl if tcx.features().async_fn_in_trait => true,
|
|
||||||
FnDeclKind::Trait if tcx.features().async_fn_in_trait => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1229,7 +1216,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||||||
hir_id: this.lower_node_id(node_id),
|
hir_id: this.lower_node_id(node_id),
|
||||||
body: this.lower_const_body(path_expr.span, Some(&path_expr)),
|
body: this.lower_const_body(path_expr.span, Some(&path_expr)),
|
||||||
});
|
});
|
||||||
return GenericArg::Const(ConstArg { value: ct, span });
|
return GenericArg::Const(ConstArg {
|
||||||
|
value: ct,
|
||||||
|
span,
|
||||||
|
is_desugared_from_effects: false,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1240,6 +1231,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||||||
ast::GenericArg::Const(ct) => GenericArg::Const(ConstArg {
|
ast::GenericArg::Const(ct) => GenericArg::Const(ConstArg {
|
||||||
value: self.lower_anon_const(&ct),
|
value: self.lower_anon_const(&ct),
|
||||||
span: self.lower_span(ct.value.span),
|
span: self.lower_span(ct.value.span),
|
||||||
|
is_desugared_from_effects: false,
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1271,7 +1263,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||||||
&PolyTraitRef {
|
&PolyTraitRef {
|
||||||
bound_generic_params: ThinVec::new(),
|
bound_generic_params: ThinVec::new(),
|
||||||
trait_ref: TraitRef { path: path.clone(), ref_id: t.id },
|
trait_ref: TraitRef { path: path.clone(), ref_id: t.id },
|
||||||
span: t.span
|
span: t.span,
|
||||||
},
|
},
|
||||||
itctx,
|
itctx,
|
||||||
ast::Const::No,
|
ast::Const::No,
|
||||||
@ -1749,14 +1741,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn lower_fn_params_to_names(&mut self, decl: &FnDecl) -> &'hir [Ident] {
|
fn lower_fn_params_to_names(&mut self, decl: &FnDecl) -> &'hir [Ident] {
|
||||||
// Skip the `...` (`CVarArgs`) trailing arguments from the AST,
|
self.arena.alloc_from_iter(decl.inputs.iter().map(|param| match param.pat.kind {
|
||||||
// as they are not explicit in HIR/Ty function signatures.
|
|
||||||
// (instead, the `c_variadic` flag is set to `true`)
|
|
||||||
let mut inputs = &decl.inputs[..];
|
|
||||||
if decl.c_variadic() {
|
|
||||||
inputs = &inputs[..inputs.len() - 1];
|
|
||||||
}
|
|
||||||
self.arena.alloc_from_iter(inputs.iter().map(|param| match param.pat.kind {
|
|
||||||
PatKind::Ident(_, ident, _) => self.lower_ident(ident),
|
PatKind::Ident(_, ident, _) => self.lower_ident(ident),
|
||||||
_ => Ident::new(kw::Empty, self.lower_span(param.pat.span)),
|
_ => Ident::new(kw::Empty, self.lower_span(param.pat.span)),
|
||||||
}))
|
}))
|
||||||
@ -1805,53 +1790,30 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||||||
self.lower_ty_direct(¶m.ty, &itctx)
|
self.lower_ty_direct(¶m.ty, &itctx)
|
||||||
}));
|
}));
|
||||||
|
|
||||||
let output = if let Some((ret_id, span)) = make_ret_async {
|
let output = if let Some((ret_id, _span)) = make_ret_async {
|
||||||
if !kind.async_fn_allowed(self.tcx) {
|
|
||||||
match kind {
|
|
||||||
FnDeclKind::Trait | FnDeclKind::Impl => {
|
|
||||||
self.tcx
|
|
||||||
.sess
|
|
||||||
.create_feature_err(
|
|
||||||
TraitFnAsync { fn_span, span },
|
|
||||||
sym::async_fn_in_trait,
|
|
||||||
)
|
|
||||||
.emit();
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
self.tcx.sess.emit_err(TraitFnAsync { fn_span, span });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let fn_def_id = self.local_def_id(fn_node_id);
|
let fn_def_id = self.local_def_id(fn_node_id);
|
||||||
self.lower_async_fn_ret_ty(&decl.output, fn_def_id, ret_id, kind)
|
self.lower_async_fn_ret_ty(&decl.output, fn_def_id, ret_id, kind, fn_span)
|
||||||
} else {
|
} else {
|
||||||
match &decl.output {
|
match &decl.output {
|
||||||
FnRetTy::Ty(ty) => {
|
FnRetTy::Ty(ty) => {
|
||||||
let context = if kind.return_impl_trait_allowed(self.tcx) {
|
let context = if kind.return_impl_trait_allowed() {
|
||||||
let fn_def_id = self.local_def_id(fn_node_id);
|
let fn_def_id = self.local_def_id(fn_node_id);
|
||||||
ImplTraitContext::ReturnPositionOpaqueTy {
|
ImplTraitContext::ReturnPositionOpaqueTy {
|
||||||
origin: hir::OpaqueTyOrigin::FnReturn(fn_def_id),
|
origin: hir::OpaqueTyOrigin::FnReturn(fn_def_id),
|
||||||
fn_kind: kind,
|
fn_kind: kind,
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
let position = match kind {
|
ImplTraitContext::Disallowed(match kind {
|
||||||
FnDeclKind::Fn | FnDeclKind::Inherent => {
|
FnDeclKind::Fn
|
||||||
unreachable!("fn should allow in-band lifetimes")
|
| FnDeclKind::Inherent
|
||||||
|
| FnDeclKind::Trait
|
||||||
|
| FnDeclKind::Impl => {
|
||||||
|
unreachable!("fn should allow return-position impl trait in traits")
|
||||||
}
|
}
|
||||||
FnDeclKind::ExternFn => ImplTraitPosition::ExternFnReturn,
|
FnDeclKind::ExternFn => ImplTraitPosition::ExternFnReturn,
|
||||||
FnDeclKind::Closure => ImplTraitPosition::ClosureReturn,
|
FnDeclKind::Closure => ImplTraitPosition::ClosureReturn,
|
||||||
FnDeclKind::Pointer => ImplTraitPosition::PointerReturn,
|
FnDeclKind::Pointer => ImplTraitPosition::PointerReturn,
|
||||||
FnDeclKind::Trait => ImplTraitPosition::TraitReturn,
|
})
|
||||||
FnDeclKind::Impl => ImplTraitPosition::ImplReturn,
|
|
||||||
};
|
|
||||||
match kind {
|
|
||||||
FnDeclKind::Trait | FnDeclKind::Impl => ImplTraitContext::FeatureGated(
|
|
||||||
position,
|
|
||||||
sym::return_position_impl_trait_in_trait,
|
|
||||||
),
|
|
||||||
_ => ImplTraitContext::Disallowed(position),
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
hir::FnRetTy::Return(self.lower_ty(ty, &context))
|
hir::FnRetTy::Return(self.lower_ty(ty, &context))
|
||||||
}
|
}
|
||||||
@ -1901,8 +1863,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||||||
fn_def_id: LocalDefId,
|
fn_def_id: LocalDefId,
|
||||||
opaque_ty_node_id: NodeId,
|
opaque_ty_node_id: NodeId,
|
||||||
fn_kind: FnDeclKind,
|
fn_kind: FnDeclKind,
|
||||||
|
fn_span: Span,
|
||||||
) -> hir::FnRetTy<'hir> {
|
) -> hir::FnRetTy<'hir> {
|
||||||
let span = self.lower_span(output.span());
|
let span = self.lower_span(fn_span);
|
||||||
let opaque_ty_span = self.mark_span_with_reason(DesugaringKind::Async, span, None);
|
let opaque_ty_span = self.mark_span_with_reason(DesugaringKind::Async, span, None);
|
||||||
|
|
||||||
let captured_lifetimes: Vec<_> = self
|
let captured_lifetimes: Vec<_> = self
|
||||||
@ -1923,18 +1886,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||||||
let future_bound = this.lower_async_fn_output_type_to_future_bound(
|
let future_bound = this.lower_async_fn_output_type_to_future_bound(
|
||||||
output,
|
output,
|
||||||
span,
|
span,
|
||||||
if let FnDeclKind::Trait = fn_kind
|
ImplTraitContext::ReturnPositionOpaqueTy {
|
||||||
&& !this.tcx.features().return_position_impl_trait_in_trait
|
origin: hir::OpaqueTyOrigin::FnReturn(fn_def_id),
|
||||||
{
|
fn_kind,
|
||||||
ImplTraitContext::FeatureGated(
|
|
||||||
ImplTraitPosition::TraitReturn,
|
|
||||||
sym::return_position_impl_trait_in_trait,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
ImplTraitContext::ReturnPositionOpaqueTy {
|
|
||||||
origin: hir::OpaqueTyOrigin::FnReturn(fn_def_id),
|
|
||||||
fn_kind,
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
arena_vec![this; future_bound]
|
arena_vec![this; future_bound]
|
||||||
@ -2568,6 +2522,7 @@ impl<'hir> GenericArgsCtor<'hir> {
|
|||||||
self.args.push(hir::GenericArg::Const(hir::ConstArg {
|
self.args.push(hir::GenericArg::Const(hir::ConstArg {
|
||||||
value: hir::AnonConst { def_id, hir_id, body },
|
value: hir::AnonConst { def_id, hir_id, body },
|
||||||
span,
|
span,
|
||||||
|
is_desugared_from_effects: true,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -82,7 +82,8 @@ impl<'ast> Visitor<'ast> for LifetimeCollectVisitor<'ast> {
// We can sometimes encounter bare trait objects
// which are represented in AST as paths.
if let Some(partial_res) = self.resolver.get_partial_res(t.id)
- && let Some(Res::Def(DefKind::Trait | DefKind::TraitAlias, _)) = partial_res.full_res()
+ && let Some(Res::Def(DefKind::Trait | DefKind::TraitAlias, _)) =
+ partial_res.full_res()
{
self.current_binders.push(t.id);
visit::walk_ty(self, t);
@@ -4,6 +4,7 @@ version = "0.0.0"
edition = "2021"

[dependencies]
+ # tidy-alphabetical-start
itertools = "0.10.1"
rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
@@ -11,11 +12,11 @@ rustc_attr = { path = "../rustc_attr" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_feature = { path = "../rustc_feature" }
- rustc_macros = { path = "../rustc_macros" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
+ rustc_macros = { path = "../rustc_macros" }
rustc_parse = { path = "../rustc_parse" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
thin-vec = "0.2.12"
- tracing = "0.1"
+ # tidy-alphabetical-end
@@ -42,6 +42,10 @@ ast_passes_const_and_async = functions cannot be both `const` and `async`
.async = `async` because of this
.label = {""}

+ ast_passes_const_and_c_variadic = functions cannot be both `const` and C-variadic
+ .const = `const` because of this
+ .variadic = C-variadic because of this
+
ast_passes_const_without_body =
free constant item without body
.suggestion = provide a definition for the constant
@@ -52,7 +52,8 @@ struct AstValidator<'a> {
/// Are we inside a trait impl?
in_trait_impl: bool,

- in_const_trait_impl: bool,
+ /// Are we inside a const trait defn or impl?
+ in_const_trait_or_impl: bool,

has_proc_macro_decls: bool,

@ -78,11 +79,19 @@ impl<'a> AstValidator<'a> {
|
|||||||
f: impl FnOnce(&mut Self),
|
f: impl FnOnce(&mut Self),
|
||||||
) {
|
) {
|
||||||
let old = mem::replace(&mut self.in_trait_impl, is_in);
|
let old = mem::replace(&mut self.in_trait_impl, is_in);
|
||||||
let old_const =
|
let old_const = mem::replace(
|
||||||
mem::replace(&mut self.in_const_trait_impl, matches!(constness, Some(Const::Yes(_))));
|
&mut self.in_const_trait_or_impl,
|
||||||
|
matches!(constness, Some(Const::Yes(_))),
|
||||||
|
);
|
||||||
f(self);
|
f(self);
|
||||||
self.in_trait_impl = old;
|
self.in_trait_impl = old;
|
||||||
self.in_const_trait_impl = old_const;
|
self.in_const_trait_or_impl = old_const;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn with_in_trait(&mut self, is_const: bool, f: impl FnOnce(&mut Self)) {
|
||||||
|
let old = mem::replace(&mut self.in_const_trait_or_impl, is_const);
|
||||||
|
f(self);
|
||||||
|
self.in_const_trait_or_impl = old;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn with_banned_impl_trait(&mut self, f: impl FnOnce(&mut Self)) {
|
fn with_banned_impl_trait(&mut self, f: impl FnOnce(&mut Self)) {
|
||||||
@ -206,14 +215,15 @@ impl<'a> AstValidator<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn visit_struct_field_def(&mut self, field: &'a FieldDef) {
|
fn visit_struct_field_def(&mut self, field: &'a FieldDef) {
|
||||||
if let Some(ident) = field.ident &&
|
if let Some(ident) = field.ident
|
||||||
ident.name == kw::Underscore {
|
&& ident.name == kw::Underscore
|
||||||
self.check_unnamed_field_ty(&field.ty, ident.span);
|
{
|
||||||
self.visit_vis(&field.vis);
|
self.check_unnamed_field_ty(&field.ty, ident.span);
|
||||||
self.visit_ident(ident);
|
self.visit_vis(&field.vis);
|
||||||
self.visit_ty_common(&field.ty);
|
self.visit_ident(ident);
|
||||||
self.walk_ty(&field.ty);
|
self.visit_ty_common(&field.ty);
|
||||||
walk_list!(self, visit_attribute, &field.attrs);
|
self.walk_ty(&field.ty);
|
||||||
|
walk_list!(self, visit_attribute, &field.attrs);
|
||||||
} else {
|
} else {
|
||||||
self.visit_field_def(field);
|
self.visit_field_def(field);
|
||||||
}
|
}
|
||||||
@ -282,13 +292,11 @@ impl<'a> AstValidator<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn deny_unnamed_field(&self, field: &FieldDef) {
|
fn deny_unnamed_field(&self, field: &FieldDef) {
|
||||||
if let Some(ident) = field.ident &&
|
if let Some(ident) = field.ident
|
||||||
ident.name == kw::Underscore {
|
&& ident.name == kw::Underscore
|
||||||
self.err_handler()
|
{
|
||||||
.emit_err(errors::InvalidUnnamedField {
|
self.err_handler()
|
||||||
span: field.span,
|
.emit_err(errors::InvalidUnnamedField { span: field.span, ident_span: ident.span });
|
||||||
ident_span: ident.span
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -474,9 +482,36 @@ impl<'a> AstValidator<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Reject C-variadic type unless the function is foreign,
|
/// Reject invalid C-variadic types.
|
||||||
/// or free and `unsafe extern "C"` semantically.
|
///
|
||||||
|
/// C-variadics must be:
|
||||||
|
/// - Non-const
|
||||||
|
/// - Either foreign, or free and `unsafe extern "C"` semantically
|
||||||
fn check_c_variadic_type(&self, fk: FnKind<'a>) {
|
fn check_c_variadic_type(&self, fk: FnKind<'a>) {
|
||||||
|
let variadic_spans: Vec<_> = fk
|
||||||
|
.decl()
|
||||||
|
.inputs
|
||||||
|
.iter()
|
||||||
|
.filter(|arg| matches!(arg.ty.kind, TyKind::CVarArgs))
|
||||||
|
.map(|arg| arg.span)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if variadic_spans.is_empty() {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(header) = fk.header() {
|
||||||
|
if let Const::Yes(const_span) = header.constness {
|
||||||
|
let mut spans = variadic_spans.clone();
|
||||||
|
spans.push(const_span);
|
||||||
|
self.err_handler().emit_err(errors::ConstAndCVariadic {
|
||||||
|
spans,
|
||||||
|
const_span,
|
||||||
|
variadic_spans: variadic_spans.clone(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
match (fk.ctxt(), fk.header()) {
|
match (fk.ctxt(), fk.header()) {
|
||||||
(Some(FnCtxt::Foreign), _) => return,
|
(Some(FnCtxt::Foreign), _) => return,
|
||||||
(Some(FnCtxt::Free), Some(header)) => match header.ext {
|
(Some(FnCtxt::Free), Some(header)) => match header.ext {
|
||||||
@ -491,11 +526,7 @@ impl<'a> AstValidator<'a> {
|
|||||||
_ => {}
|
_ => {}
|
||||||
};
|
};
|
||||||
|
|
||||||
for Param { ty, span, .. } in &fk.decl().inputs {
|
self.err_handler().emit_err(errors::BadCVariadic { span: variadic_spans });
|
||||||
if let TyKind::CVarArgs = ty.kind {
|
|
||||||
self.err_handler().emit_err(errors::BadCVariadic { span: *span });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_item_named(&self, ident: Ident, kind: &str) {
|
fn check_item_named(&self, ident: Ident, kind: &str) {
|
||||||
@ -933,23 +964,26 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
ItemKind::Trait(box Trait { is_auto, generics, bounds, items, .. }) => {
|
ItemKind::Trait(box Trait { is_auto, generics, bounds, items, .. }) => {
|
||||||
if *is_auto == IsAuto::Yes {
|
let is_const_trait = attr::contains_name(&item.attrs, sym::const_trait);
|
||||||
// Auto traits cannot have generics, super traits nor contain items.
|
self.with_in_trait(is_const_trait, |this| {
|
||||||
self.deny_generic_params(generics, item.ident.span);
|
if *is_auto == IsAuto::Yes {
|
||||||
self.deny_super_traits(bounds, item.ident.span);
|
// Auto traits cannot have generics, super traits nor contain items.
|
||||||
self.deny_where_clause(&generics.where_clause, item.ident.span);
|
this.deny_generic_params(generics, item.ident.span);
|
||||||
self.deny_items(items, item.ident.span);
|
this.deny_super_traits(bounds, item.ident.span);
|
||||||
}
|
this.deny_where_clause(&generics.where_clause, item.ident.span);
|
||||||
|
this.deny_items(items, item.ident.span);
|
||||||
|
}
|
||||||
|
|
||||||
// Equivalent of `visit::walk_item` for `ItemKind::Trait` that inserts a bound
|
// Equivalent of `visit::walk_item` for `ItemKind::Trait` that inserts a bound
|
||||||
// context for the supertraits.
|
// context for the supertraits.
|
||||||
self.visit_vis(&item.vis);
|
this.visit_vis(&item.vis);
|
||||||
self.visit_ident(item.ident);
|
this.visit_ident(item.ident);
|
||||||
self.visit_generics(generics);
|
this.visit_generics(generics);
|
||||||
self.with_tilde_const_allowed(|this| {
|
this.with_tilde_const_allowed(|this| {
|
||||||
walk_list!(this, visit_param_bound, bounds, BoundKind::SuperTraits)
|
walk_list!(this, visit_param_bound, bounds, BoundKind::SuperTraits)
|
||||||
|
});
|
||||||
|
walk_list!(this, visit_assoc_item, items, AssocCtxt::Trait);
|
||||||
});
|
});
|
||||||
walk_list!(self, visit_assoc_item, items, AssocCtxt::Trait);
|
|
||||||
walk_list!(self, visit_attribute, &item.attrs);
|
walk_list!(self, visit_attribute, &item.attrs);
|
||||||
return; // Avoid visiting again
|
return; // Avoid visiting again
|
||||||
}
|
}
|
||||||
@ -1168,28 +1202,40 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||||||
(BoundKind::SuperTraits, TraitBoundModifier::Maybe) => {
|
(BoundKind::SuperTraits, TraitBoundModifier::Maybe) => {
|
||||||
self.err_handler().emit_err(errors::OptionalTraitSupertrait {
|
self.err_handler().emit_err(errors::OptionalTraitSupertrait {
|
||||||
span: poly.span,
|
span: poly.span,
|
||||||
path_str: pprust::path_to_string(&poly.trait_ref.path)
|
path_str: pprust::path_to_string(&poly.trait_ref.path),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
(BoundKind::TraitObject, TraitBoundModifier::Maybe) => {
|
(BoundKind::TraitObject, TraitBoundModifier::Maybe) => {
|
||||||
self.err_handler().emit_err(errors::OptionalTraitObject {span: poly.span});
|
self.err_handler().emit_err(errors::OptionalTraitObject { span: poly.span });
|
||||||
}
|
}
|
||||||
(_, TraitBoundModifier::MaybeConst) if let Some(reason) = &self.disallow_tilde_const => {
|
(_, TraitBoundModifier::MaybeConst)
|
||||||
|
if let Some(reason) = &self.disallow_tilde_const =>
|
||||||
|
{
|
||||||
let reason = match reason {
|
let reason = match reason {
|
||||||
DisallowTildeConstContext::TraitObject => errors::TildeConstReason::TraitObject,
|
DisallowTildeConstContext::TraitObject => {
|
||||||
DisallowTildeConstContext::Fn(FnKind::Closure(..)) => errors::TildeConstReason::Closure,
|
errors::TildeConstReason::TraitObject
|
||||||
DisallowTildeConstContext::Fn(FnKind::Fn(_, ident, ..)) => errors::TildeConstReason::Function { ident: ident.span },
|
}
|
||||||
|
DisallowTildeConstContext::Fn(FnKind::Closure(..)) => {
|
||||||
|
errors::TildeConstReason::Closure
|
||||||
|
}
|
||||||
|
DisallowTildeConstContext::Fn(FnKind::Fn(_, ident, ..)) => {
|
||||||
|
errors::TildeConstReason::Function { ident: ident.span }
|
||||||
|
}
|
||||||
};
|
};
|
||||||
self.err_handler().emit_err(errors::TildeConstDisallowed {
|
self.err_handler()
|
||||||
span: bound.span(),
|
.emit_err(errors::TildeConstDisallowed { span: bound.span(), reason });
|
||||||
reason
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
(_, TraitBoundModifier::MaybeConstMaybe) => {
|
(_, TraitBoundModifier::MaybeConstMaybe) => {
|
||||||
self.err_handler().emit_err(errors::OptionalConstExclusive {span: bound.span(), modifier: "?" });
|
self.err_handler().emit_err(errors::OptionalConstExclusive {
|
||||||
|
span: bound.span(),
|
||||||
|
modifier: "?",
|
||||||
|
});
|
||||||
}
|
}
|
||||||
(_, TraitBoundModifier::MaybeConstNegative) => {
|
(_, TraitBoundModifier::MaybeConstNegative) => {
|
||||||
self.err_handler().emit_err(errors::OptionalConstExclusive {span: bound.span(), modifier: "!" });
|
self.err_handler().emit_err(errors::OptionalConstExclusive {
|
||||||
|
span: bound.span(),
|
||||||
|
modifier: "!",
|
||||||
|
});
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
@ -1202,7 +1248,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||||||
{
|
{
|
||||||
for arg in &args.args {
|
for arg in &args.args {
|
||||||
if let ast::AngleBracketedArg::Constraint(constraint) = arg {
|
if let ast::AngleBracketedArg::Constraint(constraint) = arg {
|
||||||
self.err_handler().emit_err(errors::ConstraintOnNegativeBound { span: constraint.span });
|
self.err_handler()
|
||||||
|
.emit_err(errors::ConstraintOnNegativeBound { span: constraint.span });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1278,7 +1325,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||||||
|
|
||||||
let tilde_const_allowed =
|
let tilde_const_allowed =
|
||||||
matches!(fk.header(), Some(FnHeader { constness: ast::Const::Yes(_), .. }))
|
matches!(fk.header(), Some(FnHeader { constness: ast::Const::Yes(_), .. }))
|
||||||
|| matches!(fk.ctxt(), Some(FnCtxt::Assoc(_)));
|
|| matches!(fk.ctxt(), Some(FnCtxt::Assoc(_)) if self.in_const_trait_or_impl);
|
||||||
|
|
||||||
let disallowed = (!tilde_const_allowed).then(|| DisallowTildeConstContext::Fn(fk));
|
let disallowed = (!tilde_const_allowed).then(|| DisallowTildeConstContext::Fn(fk));
|
||||||
|
|
||||||
@ -1363,7 +1410,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||||||
walk_list!(self, visit_ty, ty);
|
walk_list!(self, visit_ty, ty);
|
||||||
}
|
}
|
||||||
AssocItemKind::Fn(box Fn { sig, generics, body, .. })
|
AssocItemKind::Fn(box Fn { sig, generics, body, .. })
|
||||||
if self.in_const_trait_impl
|
if self.in_const_trait_or_impl
|
||||||
|| ctxt == AssocCtxt::Trait
|
|| ctxt == AssocCtxt::Trait
|
||||||
|| matches!(sig.header.constness, Const::Yes(_)) =>
|
|| matches!(sig.header.constness, Const::Yes(_)) =>
|
||||||
{
|
{
|
||||||
@ -1395,62 +1442,54 @@ fn deny_equality_constraints(
|
|||||||
let mut err = errors::EqualityInWhere { span: predicate.span, assoc: None, assoc2: None };
|
let mut err = errors::EqualityInWhere { span: predicate.span, assoc: None, assoc2: None };
|
||||||
|
|
||||||
// Given `<A as Foo>::Bar = RhsTy`, suggest `A: Foo<Bar = RhsTy>`.
|
// Given `<A as Foo>::Bar = RhsTy`, suggest `A: Foo<Bar = RhsTy>`.
|
||||||
if let TyKind::Path(Some(qself), full_path) = &predicate.lhs_ty.kind {
|
if let TyKind::Path(Some(qself), full_path) = &predicate.lhs_ty.kind
|
||||||
if let TyKind::Path(None, path) = &qself.ty.kind {
|
&& let TyKind::Path(None, path) = &qself.ty.kind
|
||||||
match &path.segments[..] {
|
&& let [PathSegment { ident, args: None, .. }] = &path.segments[..]
|
||||||
[PathSegment { ident, args: None, .. }] => {
|
{
|
||||||
for param in &generics.params {
|
for param in &generics.params {
|
||||||
if param.ident == *ident {
|
if param.ident == *ident
|
||||||
let param = ident;
|
&& let [PathSegment { ident, args, .. }] = &full_path.segments[qself.position..]
|
||||||
match &full_path.segments[qself.position..] {
|
{
|
||||||
[PathSegment { ident, args, .. }] => {
|
// Make a new `Path` from `foo::Bar` to `Foo<Bar = RhsTy>`.
|
||||||
// Make a new `Path` from `foo::Bar` to `Foo<Bar = RhsTy>`.
|
let mut assoc_path = full_path.clone();
|
||||||
let mut assoc_path = full_path.clone();
|
// Remove `Bar` from `Foo::Bar`.
|
||||||
// Remove `Bar` from `Foo::Bar`.
|
assoc_path.segments.pop();
|
||||||
assoc_path.segments.pop();
|
let len = assoc_path.segments.len() - 1;
|
||||||
let len = assoc_path.segments.len() - 1;
|
let gen_args = args.as_deref().cloned();
|
||||||
let gen_args = args.as_deref().cloned();
|
// Build `<Bar = RhsTy>`.
|
||||||
// Build `<Bar = RhsTy>`.
|
let arg = AngleBracketedArg::Constraint(AssocConstraint {
|
||||||
let arg = AngleBracketedArg::Constraint(AssocConstraint {
|
id: rustc_ast::node_id::DUMMY_NODE_ID,
|
||||||
id: rustc_ast::node_id::DUMMY_NODE_ID,
|
ident: *ident,
|
||||||
ident: *ident,
|
gen_args,
|
||||||
gen_args,
|
kind: AssocConstraintKind::Equality {
|
||||||
kind: AssocConstraintKind::Equality {
|
term: predicate.rhs_ty.clone().into(),
|
||||||
term: predicate.rhs_ty.clone().into(),
|
},
|
||||||
},
|
span: ident.span,
|
||||||
span: ident.span,
|
});
|
||||||
});
|
// Add `<Bar = RhsTy>` to `Foo`.
|
||||||
// Add `<Bar = RhsTy>` to `Foo`.
|
match &mut assoc_path.segments[len].args {
|
||||||
match &mut assoc_path.segments[len].args {
|
Some(args) => match args.deref_mut() {
|
||||||
Some(args) => match args.deref_mut() {
|
GenericArgs::Parenthesized(_) => continue,
|
||||||
GenericArgs::Parenthesized(_) => continue,
|
GenericArgs::AngleBracketed(args) => {
|
||||||
GenericArgs::AngleBracketed(args) => {
|
args.args.push(arg);
|
||||||
args.args.push(arg);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
empty_args => {
|
|
||||||
*empty_args = Some(
|
|
||||||
AngleBracketedArgs {
|
|
||||||
span: ident.span,
|
|
||||||
args: thin_vec![arg],
|
|
||||||
}
|
|
||||||
.into(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
err.assoc = Some(errors::AssociatedSuggestion {
|
|
||||||
span: predicate.span,
|
|
||||||
ident: *ident,
|
|
||||||
param: *param,
|
|
||||||
path: pprust::path_to_string(&assoc_path),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
empty_args => {
|
||||||
|
*empty_args = Some(
|
||||||
|
AngleBracketedArgs {
|
||||||
|
span: ident.span,
|
||||||
|
args: thin_vec![arg],
|
||||||
|
}
|
||||||
|
.into(),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => {}
|
err.assoc = Some(errors::AssociatedSuggestion {
|
||||||
|
span: predicate.span,
|
||||||
|
ident: *ident,
|
||||||
|
param: param.ident,
|
||||||
|
path: pprust::path_to_string(&assoc_path),
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1510,7 +1549,7 @@ pub fn check_crate(
|
|||||||
features,
|
features,
|
||||||
extern_mod: None,
|
extern_mod: None,
|
||||||
in_trait_impl: false,
|
in_trait_impl: false,
|
||||||
in_const_trait_impl: false,
|
in_const_trait_or_impl: false,
|
||||||
has_proc_macro_decls: false,
|
has_proc_macro_decls: false,
|
||||||
outer_impl_trait: None,
|
outer_impl_trait: None,
|
||||||
disallow_tilde_const: None,
|
disallow_tilde_const: None,
|
||||||
|
|||||||
@ -271,7 +271,7 @@ pub struct ExternItemAscii {
|
|||||||
#[diag(ast_passes_bad_c_variadic)]
|
#[diag(ast_passes_bad_c_variadic)]
|
||||||
pub struct BadCVariadic {
|
pub struct BadCVariadic {
|
||||||
#[primary_span]
|
#[primary_span]
|
||||||
pub span: Span,
|
pub span: Vec<Span>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Diagnostic)]
|
#[derive(Diagnostic)]
|
||||||
@ -583,6 +583,17 @@ pub struct ConstAndAsync {
|
|||||||
pub span: Span,
|
pub span: Span,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Diagnostic)]
|
||||||
|
#[diag(ast_passes_const_and_c_variadic)]
|
||||||
|
pub struct ConstAndCVariadic {
|
||||||
|
#[primary_span]
|
||||||
|
pub spans: Vec<Span>,
|
||||||
|
#[label(ast_passes_const)]
|
||||||
|
pub const_span: Span,
|
||||||
|
#[label(ast_passes_variadic)]
|
||||||
|
pub variadic_spans: Vec<Span>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Diagnostic)]
|
#[derive(Diagnostic)]
|
||||||
#[diag(ast_passes_pattern_in_foreign, code = "E0130")]
|
#[diag(ast_passes_pattern_in_foreign, code = "E0130")]
|
||||||
pub struct PatternInForeign {
|
pub struct PatternInForeign {
|
||||||
|
|||||||
@ -10,53 +10,54 @@ use rustc_span::symbol::sym;
|
|||||||
use rustc_span::Span;
|
use rustc_span::Span;
|
||||||
use rustc_target::spec::abi;
|
use rustc_target::spec::abi;
|
||||||
use thin_vec::ThinVec;
|
use thin_vec::ThinVec;
|
||||||
use tracing::debug;
|
|
||||||
|
|
||||||
use crate::errors;
|
use crate::errors;
|
||||||
|
|
||||||
macro_rules! gate_feature_fn {
|
/// The common case.
|
||||||
($visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr, $help: expr) => {{
|
macro_rules! gate {
|
||||||
let (visitor, has_feature, span, name, explain, help) =
|
($visitor:expr, $feature:ident, $span:expr, $explain:expr) => {{
|
||||||
(&*$visitor, $has_feature, $span, $name, $explain, $help);
|
if !$visitor.features.$feature && !$span.allows_unstable(sym::$feature) {
|
||||||
let has_feature: bool = has_feature(visitor.features);
|
feature_err(&$visitor.sess.parse_sess, sym::$feature, $span, $explain).emit();
|
||||||
debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature);
|
|
||||||
if !has_feature && !span.allows_unstable($name) {
|
|
||||||
feature_err(&visitor.sess.parse_sess, name, span, explain).help(help).emit();
|
|
||||||
}
|
}
|
||||||
}};
|
}};
|
||||||
($visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr) => {{
|
($visitor:expr, $feature:ident, $span:expr, $explain:expr, $help:expr) => {{
|
||||||
let (visitor, has_feature, span, name, explain) =
|
if !$visitor.features.$feature && !$span.allows_unstable(sym::$feature) {
|
||||||
(&*$visitor, $has_feature, $span, $name, $explain);
|
feature_err(&$visitor.sess.parse_sess, sym::$feature, $span, $explain)
|
||||||
let has_feature: bool = has_feature(visitor.features);
|
.help($help)
|
||||||
debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature);
|
.emit();
|
||||||
if !has_feature && !span.allows_unstable($name) {
|
|
||||||
feature_err(&visitor.sess.parse_sess, name, span, explain).emit();
|
|
||||||
}
|
|
||||||
}};
|
|
||||||
(future_incompatible; $visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr) => {{
|
|
||||||
let (visitor, has_feature, span, name, explain) =
|
|
||||||
(&*$visitor, $has_feature, $span, $name, $explain);
|
|
||||||
let has_feature: bool = has_feature(visitor.features);
|
|
||||||
debug!(
|
|
||||||
"gate_feature(feature = {:?}, span = {:?}); has? {} (future_incompatible)",
|
|
||||||
name, span, has_feature
|
|
||||||
);
|
|
||||||
if !has_feature && !span.allows_unstable($name) {
|
|
||||||
feature_warn(&visitor.sess.parse_sess, name, span, explain);
|
|
||||||
}
|
}
|
||||||
}};
|
}};
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! gate_feature_post {
|
/// The unusual case, where the `has_feature` condition is non-standard.
|
||||||
($visitor: expr, $feature: ident, $span: expr, $explain: expr, $help: expr) => {
|
macro_rules! gate_alt {
|
||||||
gate_feature_fn!($visitor, |x: &Features| x.$feature, $span, sym::$feature, $explain, $help)
|
($visitor:expr, $has_feature:expr, $name:expr, $span:expr, $explain:expr) => {{
|
||||||
};
|
if !$has_feature && !$span.allows_unstable($name) {
|
||||||
($visitor: expr, $feature: ident, $span: expr, $explain: expr) => {
|
feature_err(&$visitor.sess.parse_sess, $name, $span, $explain).emit();
|
||||||
gate_feature_fn!($visitor, |x: &Features| x.$feature, $span, sym::$feature, $explain)
|
}
|
||||||
};
|
}};
|
||||||
(future_incompatible; $visitor: expr, $feature: ident, $span: expr, $explain: expr) => {
|
}
|
||||||
gate_feature_fn!(future_incompatible; $visitor, |x: &Features| x.$feature, $span, sym::$feature, $explain)
|
|
||||||
};
|
/// The case involving a multispan.
|
||||||
|
macro_rules! gate_multi {
|
||||||
|
($visitor:expr, $feature:ident, $spans:expr, $explain:expr) => {{
|
||||||
|
if !$visitor.features.$feature {
|
||||||
|
let spans: Vec<_> =
|
||||||
|
$spans.filter(|span| !span.allows_unstable(sym::$feature)).collect();
|
||||||
|
if !spans.is_empty() {
|
||||||
|
feature_err(&$visitor.sess.parse_sess, sym::$feature, spans, $explain).emit();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The legacy case.
|
||||||
|
macro_rules! gate_legacy {
|
||||||
|
($visitor:expr, $feature:ident, $span:expr, $explain:expr) => {{
|
||||||
|
if !$visitor.features.$feature && !$span.allows_unstable(sym::$feature) {
|
||||||
|
feature_warn(&$visitor.sess.parse_sess, sym::$feature, $span, $explain);
|
||||||
|
}
|
||||||
|
}};
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn check_attribute(attr: &ast::Attribute, sess: &Session, features: &Features) {
|
pub fn check_attribute(attr: &ast::Attribute, sess: &Session, features: &Features) {
|
||||||
@ -78,7 +79,7 @@ impl<'a> PostExpansionVisitor<'a> {
|
|||||||
match symbol_unescaped {
|
match symbol_unescaped {
|
||||||
// Stable
|
// Stable
|
||||||
sym::Rust | sym::C => {}
|
sym::Rust | sym::C => {}
|
||||||
abi => gate_feature_post!(
|
abi => gate!(
|
||||||
&self,
|
&self,
|
||||||
const_extern_fn,
|
const_extern_fn,
|
||||||
span,
|
span,
|
||||||
@ -129,14 +130,14 @@ impl<'a> PostExpansionVisitor<'a> {
|
|||||||
fn visit_ty(&mut self, ty: &ast::Ty) {
|
fn visit_ty(&mut self, ty: &ast::Ty) {
|
||||||
if let ast::TyKind::ImplTrait(..) = ty.kind {
|
if let ast::TyKind::ImplTrait(..) = ty.kind {
|
||||||
if self.in_associated_ty {
|
if self.in_associated_ty {
|
||||||
gate_feature_post!(
|
gate!(
|
||||||
&self.vis,
|
&self.vis,
|
||||||
impl_trait_in_assoc_type,
|
impl_trait_in_assoc_type,
|
||||||
ty.span,
|
ty.span,
|
||||||
"`impl Trait` in associated types is unstable"
|
"`impl Trait` in associated types is unstable"
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
gate_feature_post!(
|
gate!(
|
||||||
&self.vis,
|
&self.vis,
|
||||||
type_alias_impl_trait,
|
type_alias_impl_trait,
|
||||||
ty.span,
|
ty.span,
|
||||||
@ -153,23 +154,16 @@ impl<'a> PostExpansionVisitor<'a> {
|
|||||||
fn check_late_bound_lifetime_defs(&self, params: &[ast::GenericParam]) {
|
fn check_late_bound_lifetime_defs(&self, params: &[ast::GenericParam]) {
|
||||||
// Check only lifetime parameters are present and that the lifetime
|
// Check only lifetime parameters are present and that the lifetime
|
||||||
// parameters that are present have no bounds.
|
// parameters that are present have no bounds.
|
||||||
let non_lt_param_spans: Vec<_> = params
|
let non_lt_param_spans = params.iter().filter_map(|param| match param.kind {
|
||||||
.iter()
|
ast::GenericParamKind::Lifetime { .. } => None,
|
||||||
.filter_map(|param| match param.kind {
|
_ => Some(param.ident.span),
|
||||||
ast::GenericParamKind::Lifetime { .. } => None,
|
});
|
||||||
_ => Some(param.ident.span),
|
gate_multi!(
|
||||||
})
|
&self,
|
||||||
.collect();
|
non_lifetime_binders,
|
||||||
// FIXME: gate_feature_post doesn't really handle multispans...
|
non_lt_param_spans,
|
||||||
-        if !non_lt_param_spans.is_empty() && !self.features.non_lifetime_binders {
-            feature_err(
-                &self.sess.parse_sess,
-                sym::non_lifetime_binders,
-                non_lt_param_spans,
-                crate::fluent_generated::ast_passes_forbidden_non_lifetime_param,
-            )
-            .emit();
-        }
+            crate::fluent_generated::ast_passes_forbidden_non_lifetime_param
+        );
         for param in params {
             if !param.bounds.is_empty() {
                 let spans: Vec<_> = param.bounds.iter().map(|b| b.span()).collect();
@@ -188,48 +182,39 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
            ..
        }) = attr_info
        {
-            gate_feature_fn!(self, has_feature, attr.span, *name, *descr);
+            gate_alt!(self, has_feature(&self.features), *name, attr.span, *descr);
        }
        // Check unstable flavors of the `#[doc]` attribute.
        if attr.has_name(sym::doc) {
            for nested_meta in attr.meta_item_list().unwrap_or_default() {
-                macro_rules! gate_doc { ($($name:ident => $feature:ident)*) => {
-                    $(if nested_meta.has_name(sym::$name) {
-                        let msg = concat!("`#[doc(", stringify!($name), ")]` is experimental");
-                        gate_feature_post!(self, $feature, attr.span, msg);
-                    })*
+                macro_rules! gate_doc { ($($s:literal { $($name:ident => $feature:ident)* })*) => {
+                    $($(if nested_meta.has_name(sym::$name) {
+                        let msg = concat!("`#[doc(", stringify!($name), ")]` is ", $s);
+                        gate!(self, $feature, attr.span, msg);
+                    })*)*
                }}

                gate_doc!(
-                    cfg => doc_cfg
-                    cfg_hide => doc_cfg_hide
-                    masked => doc_masked
-                    notable_trait => doc_notable_trait
+                    "experimental" {
+                        cfg => doc_cfg
+                        cfg_hide => doc_cfg_hide
+                        masked => doc_masked
+                        notable_trait => doc_notable_trait
+                    }
+                    "meant for internal use only" {
+                        keyword => rustdoc_internals
+                        fake_variadic => rustdoc_internals
+                    }
                );
-
-                if nested_meta.has_name(sym::keyword) {
-                    let msg = "`#[doc(keyword)]` is meant for internal use only";
-                    gate_feature_post!(self, rustdoc_internals, attr.span, msg);
-                }
-
-                if nested_meta.has_name(sym::fake_variadic) {
-                    let msg = "`#[doc(fake_variadic)]` is meant for internal use only";
-                    gate_feature_post!(self, rustdoc_internals, attr.span, msg);
-                }
            }
        }
        if !attr.is_doc_comment()
-            && attr.get_normal_item().path.segments.len() == 2
-            && attr.get_normal_item().path.segments[0].ident.name == sym::diagnostic
+            && let [seg, _] = attr.get_normal_item().path.segments.as_slice()
+            && seg.ident.name == sym::diagnostic
            && !self.features.diagnostic_namespace
        {
            let msg = "`#[diagnostic]` attribute name space is experimental";
-            gate_feature_post!(
-                self,
-                diagnostic_namespace,
-                attr.get_normal_item().path.segments[0].ident.span,
-                msg
-            );
+            gate!(self, diagnostic_namespace, seg.ident.span, msg);
        }

        // Emit errors for non-staged-api crates.
@@ -255,12 +240,11 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
        ast::ItemKind::Fn(..) => {
            if attr::contains_name(&i.attrs, sym::start) {
-                gate_feature_post!(
+                gate!(
                    &self,
                    start,
                    i.span,
-                    "`#[start]` functions are experimental \
-                     and their signature may change \
+                    "`#[start]` functions are experimental and their signature may change \
                     over time"
                );
            }
@@ -270,7 +254,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                if item.has_name(sym::simd) {
-                    gate_feature_post!(
+                    gate!(
@@ -283,7 +267,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
            if let &ast::ImplPolarity::Negative(span) = polarity {
-                gate_feature_post!(
+                gate!(
@@ -293,12 +277,12 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
            if let ast::Defaultness::Default(_) = defaultness {
-                gate_feature_post!(&self, specialization, i.span, "specialization is unstable");
+                gate!(&self, specialization, i.span, "specialization is unstable");
            }
        }

        ast::ItemKind::Trait(box ast::Trait { is_auto: ast::IsAuto::Yes, .. }) => {
-            gate_feature_post!(
+            gate!(
@@ -307,12 +291,12 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
        ast::ItemKind::TraitAlias(..) => {
-            gate_feature_post!(&self, trait_alias, i.span, "trait aliases are experimental");
+            gate!(&self, trait_alias, i.span, "trait aliases are experimental");
        }

        ast::ItemKind::MacroDef(ast::MacroDef { macro_rules: false, .. }) => {
            let msg = "`macro` is experimental";
-            gate_feature_post!(&self, decl_macro, i.span, msg);
+            gate!(&self, decl_macro, i.span, msg);
        }
@@ -331,7 +315,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
            if links_to_llvm {
-                gate_feature_post!(
+                gate!(
@@ -340,7 +324,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
            ast::ForeignItemKind::TyAlias(..) => {
-                gate_feature_post!(&self, extern_types, i.span, "extern types are experimental");
+                gate!(&self, extern_types, i.span, "extern types are experimental");
            }
@@ -356,7 +340,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
            ast::TyKind::Never => {
-                gate_feature_post!(&self, never_type, ty.span, "the `!` type is experimental");
+                gate!(&self, never_type, ty.span, "the `!` type is experimental");
            }
@@ -389,7 +373,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
            ast::ExprKind::TryBlock(_) => {
-                gate_feature_post!(&self, try_blocks, e.span, "`try` expression is experimental");
+                gate!(&self, try_blocks, e.span, "`try` expression is experimental");
            }
@@ -405,7 +389,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                if let PatKind::Range(Some(_), None, Spanned { .. }) = inner_pat.kind {
-                    gate_feature_post!(
+                    gate!(
@@ -415,15 +399,10 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
            PatKind::Box(..) => {
-                gate_feature_post!(
-                    &self,
-                    box_patterns,
-                    pattern.span,
-                    "box pattern syntax is experimental"
-                );
+                gate!(&self, box_patterns, pattern.span, "box pattern syntax is experimental");
            }
            PatKind::Range(_, Some(_), Spanned { node: RangeEnd::Excluded, .. }) => {
-                gate_feature_post!(
+                gate!(
@@ -451,7 +430,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
        if fn_kind.ctxt() != Some(FnCtxt::Foreign) && fn_kind.decl().c_variadic() {
-            gate_feature_post!(&self, c_variadic, span, "C-variadic functions are unstable");
+            gate!(&self, c_variadic, span, "C-variadic functions are unstable");
        }
@@ -463,14 +442,14 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
-            gate_feature_post!(
+            gate!(
                &self,
                return_type_notation,
                constraint.span,
                "return type notation is experimental"
            );
        } else {
-            gate_feature_post!(
+            gate!(
                &self,
                associated_type_bounds,
                constraint.span,
@@ -486,7 +465,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                if let (Some(_), AssocCtxt::Trait) = (ty, ctxt) {
-                    gate_feature_post!(
+                    gate!(
@@ -502,11 +481,11 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
        if let ast::Defaultness::Default(_) = i.kind.defaultness() {
            // Limit `min_specialization` to only specializing functions.
-            gate_feature_fn!(
+            gate_alt!(
                &self,
-                |x: &Features| x.specialization || (is_fn && x.min_specialization),
-                i.span,
+                self.features.specialization || (is_fn && self.features.min_specialization),
                sym::specialization,
+                i.span,
                "specialization is unstable"
            );
        }
@@ -521,17 +500,17 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
    let spans = sess.parse_sess.gated_spans.spans.borrow();
    macro_rules! gate_all {
-        ($gate:ident, $msg:literal, $help:literal) => {
-            if let Some(spans) = spans.get(&sym::$gate) {
-                for span in spans {
-                    gate_feature_post!(&visitor, $gate, *span, $msg, $help);
-                }
-            }
-        };
        ($gate:ident, $msg:literal) => {
            if let Some(spans) = spans.get(&sym::$gate) {
                for span in spans {
-                    gate_feature_post!(&visitor, $gate, *span, $msg);
+                    gate!(&visitor, $gate, *span, $msg);
+                }
+            }
+        };
+        ($gate:ident, $msg:literal, $help:literal) => {
+            if let Some(spans) = spans.get(&sym::$gate) {
+                for span in spans {
+                    gate!(&visitor, $gate, *span, $msg, $help);
                }
            }
        };
@@ -554,7 +533,12 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
    gate_all!(more_qualified_paths, "usage of qualified paths in this context is experimental");
-    gate_all!(generators, "yield syntax is experimental");
+    for &span in spans.get(&sym::yield_expr).iter().copied().flatten() {
+        if !span.at_least_rust_2024() {
+            gate!(&visitor, coroutines, span, "yield syntax is experimental");
+        }
+    }
+    gate_all!(gen_blocks, "gen blocks are experimental");
    gate_all!(raw_ref_op, "raw address of syntax is experimental");
    gate_all!(const_trait_impl, "const trait impls are experimental");
@@ -585,7 +569,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
            for span in spans.get(&sym::$gate).unwrap_or(&vec![]) {
-                gate_feature_post!(future_incompatible; &visitor, $gate, *span, $msg);
+                gate_legacy!(&visitor, $gate, *span, $msg);
            }
@@ -603,6 +587,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
    gate_all_legacy_dont_use!(try_blocks, "`try` blocks are unstable");
+    gate_all_legacy_dont_use!(auto_traits, "`auto` traits are unstable");

    visit::walk_crate(&mut visitor, krate);
}
@@ -657,7 +642,7 @@ fn check_incompatible_features(sess: &Session, features: &Features) {
    for (f1, f2) in rustc_feature::INCOMPATIBLE_FEATURES
        .iter()
-        .filter(|&&(f1, f2)| features.enabled(f1) && features.enabled(f2))
+        .filter(|&&(f1, f2)| features.active(f1) && features.active(f2))
    {
        if let Some((f1_name, f1_span)) = declared_features.clone().find(|(name, _)| name == f1) {
            if let Some((f2_name, f2_span)) = declared_features.clone().find(|(name, _)| name == f2)
@@ -4,6 +4,9 @@
 //!
 //! The crate also contains other misc AST visitors, e.g. `node_count` and `show_span`.
 
+#![cfg_attr(not(bootstrap), allow(internal_features))]
+#![cfg_attr(not(bootstrap), doc(rust_logo))]
+#![cfg_attr(not(bootstrap), feature(rustdoc_internals))]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]
 #![feature(iter_is_partitioned)]
@@ -3,9 +3,9 @@ name = "rustc_ast_pretty"
 version = "0.0.0"
 edition = "2021"
 
-[lib]
-
 [dependencies]
+# tidy-alphabetical-start
 rustc_ast = { path = "../rustc_ast" }
 rustc_span = { path = "../rustc_span" }
 thin-vec = "0.2.12"
+# tidy-alphabetical-end
@@ -1,3 +1,6 @@
+#![cfg_attr(not(bootstrap), allow(internal_features))]
+#![cfg_attr(not(bootstrap), feature(rustdoc_internals))]
+#![cfg_attr(not(bootstrap), doc(rust_logo))]
 #![deny(rustc::untranslatable_diagnostic)]
 #![deny(rustc::diagnostic_outside_of_impl)]
 #![feature(associated_type_bounds)]
@@ -146,37 +146,49 @@ pub fn print_crate<'a>(
     s.s.eof()
 }
 
-/// This makes printed token streams look slightly nicer,
-/// and also addresses some specific regressions described in #63896 and #73345.
-fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool {
-    if let TokenTree::Token(token, _) = prev {
-        // No space after these tokens, e.g. `x.y`, `$e`
-        // (The carets point to `prev`.) ^ ^
-        if matches!(token.kind, token::Dot | token::Dollar) {
-            return false;
-        }
-        if let token::DocComment(comment_kind, ..) = token.kind {
-            return comment_kind != CommentKind::Line;
-        }
-    }
-    match tt {
-        // No space before these tokens, e.g. `foo,`, `println!`, `x.y`
-        // (The carets point to `token`.) ^ ^ ^
-        //
-        // FIXME: having `Not` here works well for macro invocations like
-        // `println!()`, but is bad when `!` means "logical not" or "the never
-        // type", where the lack of space causes ugliness like this:
-        // `Fn() ->!`, `x =! y`, `if! x { f(); }`.
-        TokenTree::Token(token, _) => !matches!(token.kind, token::Comma | token::Not | token::Dot),
-        // No space before parentheses if preceded by these tokens, e.g. `foo(...)`
-        TokenTree::Delimited(_, Delimiter::Parenthesis, _) => {
-            !matches!(prev, TokenTree::Token(Token { kind: token::Ident(..), .. }, _))
-        }
-        // No space before brackets if preceded by these tokens, e.g. `#[...]`
-        TokenTree::Delimited(_, Delimiter::Bracket, _) => {
-            !matches!(prev, TokenTree::Token(Token { kind: token::Pound, .. }, _))
-        }
-        TokenTree::Delimited(..) => true,
+/// Should two consecutive tokens be printed with a space between them?
+///
+/// Note: some old proc macros parse pretty-printed output, so changes here can
+/// break old code. For example:
+/// - #63896: `#[allow(unused,` must be printed rather than `#[allow(unused ,`
+/// - #73345: `#[allow(unused)] must be printed rather than `# [allow(unused)]
+///
+fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
+    use token::*;
+    use Delimiter::*;
+    use TokenTree::Delimited as Del;
+    use TokenTree::Token as Tok;
+
+    // Each match arm has one or more examples in comments. The default is to
+    // insert space between adjacent tokens, except for the cases listed in
+    // this match.
+    match (tt1, tt2) {
+        // No space after line doc comments.
+        (Tok(Token { kind: DocComment(CommentKind::Line, ..), .. }, _), _) => false,
+
+        // `.` + ANYTHING: `x.y`, `tup.0`
+        // `$` + ANYTHING: `$e`
+        (Tok(Token { kind: Dot | Dollar, .. }, _), _) => false,
+
+        // ANYTHING + `,`: `foo,`
+        // ANYTHING + `.`: `x.y`, `tup.0`
+        // ANYTHING + `!`: `foo! { ... }`
+        //
+        // FIXME: Incorrect cases:
+        // - Logical not: `x =! y`, `if! x { f(); }`
+        // - Never type: `Fn() ->!`
+        (_, Tok(Token { kind: Comma | Dot | Not, .. }, _)) => false,
+
+        // IDENT + `(`: `f(3)`
+        //
+        // FIXME: Incorrect cases:
+        // - Let: `let(a, b) = (1, 2)`
+        (Tok(Token { kind: Ident(..), .. }, _), Del(_, Parenthesis, _)) => false,
+
+        // `#` + `[`: `#[attr]`
+        (Tok(Token { kind: Pound, .. }, _), Del(_, Bracket, _)) => false,
+
+        _ => true,
     }
 }
@@ -575,7 +587,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
         while let Some(tt) = iter.next() {
             self.print_tt(tt, convert_dollar_crate);
             if let Some(next) = iter.peek() {
-                if tt_prepend_space(next, tt) {
+                if space_between(tt, next) {
                     self.space();
                 }
             }
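As a rough illustration of the spacing rules encoded by the new `space_between` above, the sketch below models the same decisions with a deliberately simplified token enum; it is an assumption of this example only, not rustc's real `TokenTree`/`Token` types.

```rust
// Simplified model of the pretty-printer's "should these two tokens be
// separated by a space?" decision. The `Tok` enum is hypothetical.
#[derive(Clone, Copy)]
#[allow(dead_code)]
enum Tok {
    Ident,       // `foo`
    Dot,         // `.`
    Dollar,      // `$`
    Comma,       // `,`
    Not,         // `!`
    Pound,       // `#`
    ParenGroup,  // `( ... )`
    BracketGroup, // `[ ... ]`
    Other,
}

fn space_between(t1: Tok, t2: Tok) -> bool {
    match (t1, t2) {
        // `.` or `$` glue to whatever follows: `x.y`, `$e`.
        (Tok::Dot | Tok::Dollar, _) => false,
        // `,`, `.`, `!` glue to whatever precedes: `foo,`, `x.y`, `foo!`.
        (_, Tok::Comma | Tok::Dot | Tok::Not) => false,
        // An identifier glues to a following parenthesized group: `f(3)`.
        (Tok::Ident, Tok::ParenGroup) => false,
        // `#` glues to a following bracketed group: `#[attr]`.
        (Tok::Pound, Tok::BracketGroup) => false,
        _ => true,
    }
}

fn main() {
    // `#[allow(unused,` keeps `unused` and `,` together (issue #63896),
    // and `#` stays glued to `[` (issue #73345).
    assert!(!space_between(Tok::Ident, Tok::Comma));
    assert!(!space_between(Tok::Pound, Tok::BracketGroup));
    assert!(space_between(Tok::Ident, Tok::Ident));
}
```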
@@ -445,8 +445,8 @@ impl<'a> State<'a> {
             self.ibox(0);
             self.print_block_with_attrs(blk, attrs);
         }
-        ast::ExprKind::Async(capture_clause, blk) => {
-            self.word_nbsp("async");
+        ast::ExprKind::Gen(capture_clause, blk, kind) => {
+            self.word_nbsp(kind.modifier());
             self.print_capture_clause(*capture_clause);
             // cbox/ibox in analogy to the `ExprKind::Block` arm above
             self.cbox(0);
@@ -673,7 +673,7 @@ impl<'a> State<'a> {
     fn print_capture_clause(&mut self, capture_clause: ast::CaptureBy) {
         match capture_clause {
-            ast::CaptureBy::Value => self.word_space("move"),
+            ast::CaptureBy::Value { .. } => self.word_space("move"),
             ast::CaptureBy::Ref => {}
         }
     }
@@ -684,8 +684,8 @@ pub fn reconstruct_format_args_template_string(pieces: &[FormatArgsPiece]) -> St
     for piece in pieces {
         match piece {
             FormatArgsPiece::Literal(s) => {
-                for c in s.as_str().escape_debug() {
-                    template.push(c);
+                for c in s.as_str().chars() {
+                    template.extend(c.escape_debug());
                     if let '{' | '}' = c {
                         template.push(c);
                     }
@@ -3,17 +3,17 @@ name = "rustc_attr"
 version = "0.0.0"
 edition = "2021"
 
-[lib]
-
 [dependencies]
+# tidy-alphabetical-start
+rustc_ast = { path = "../rustc_ast" }
 rustc_ast_pretty = { path = "../rustc_ast_pretty" }
-rustc_serialize = { path = "../rustc_serialize" }
-rustc_errors = { path = "../rustc_errors" }
-rustc_fluent_macro = { path = "../rustc_fluent_macro" }
-rustc_span = { path = "../rustc_span" }
 rustc_data_structures = { path = "../rustc_data_structures" }
+rustc_errors = { path = "../rustc_errors" }
 rustc_feature = { path = "../rustc_feature" }
+rustc_fluent_macro = { path = "../rustc_fluent_macro" }
 rustc_lexer = { path = "../rustc_lexer" }
 rustc_macros = { path = "../rustc_macros" }
+rustc_serialize = { path = "../rustc_serialize" }
 rustc_session = { path = "../rustc_session" }
-rustc_ast = { path = "../rustc_ast" }
+rustc_span = { path = "../rustc_span" }
+# tidy-alphabetical-end
@@ -58,6 +58,9 @@ attr_invalid_repr_hint_no_paren =
 attr_invalid_repr_hint_no_value =
     invalid representation hint: `{$name}` does not take a value
 
+attr_invalid_since =
+    'since' must be a Rust version number, such as "1.31.0"
+
 attr_missing_feature =
     missing 'feature'
@@ -3,13 +3,14 @@
 use rustc_ast::{self as ast, attr};
 use rustc_ast::{Attribute, LitKind, MetaItem, MetaItemKind, MetaItemLit, NestedMetaItem, NodeId};
 use rustc_ast_pretty::pprust;
+use rustc_errors::ErrorGuaranteed;
 use rustc_feature::{find_gated_cfg, is_builtin_attr_name, Features, GatedCfg};
 use rustc_macros::HashStable_Generic;
 use rustc_session::config::ExpectedValues;
 use rustc_session::lint::builtin::UNEXPECTED_CFGS;
 use rustc_session::lint::BuiltinLintDiagnostics;
 use rustc_session::parse::{feature_err, ParseSess};
-use rustc_session::Session;
+use rustc_session::{RustcVersion, Session};
 use rustc_span::hygiene::Transparency;
 use rustc_span::{symbol::sym, symbol::Symbol, Span};
 use std::num::NonZeroU32;
@@ -22,25 +23,10 @@ use crate::session_diagnostics::{self, IncorrectReprFormatGenericCause};
 /// For more, see [this pull request](https://github.com/rust-lang/rust/pull/100591).
 pub const VERSION_PLACEHOLDER: &str = "CURRENT_RUSTC_VERSION";
 
-pub fn rust_version_symbol() -> Symbol {
-    let version = option_env!("CFG_RELEASE").unwrap_or("<current>");
-    Symbol::intern(&version)
-}
-
 pub fn is_builtin_attr(attr: &Attribute) -> bool {
     attr.is_doc_comment() || attr.ident().is_some_and(|ident| is_builtin_attr_name(ident.name))
 }
 
-enum AttrError {
-    MultipleItem(String),
-    UnknownMetaItem(String, &'static [&'static str]),
-    MissingSince,
-    NonIdentFeature,
-    MissingFeature,
-    MultipleStabilityLevels,
-    UnsupportedLiteral(UnsupportedLiteralReason, /* is_bytestr */ bool),
-}
-
 pub(crate) enum UnsupportedLiteralReason {
     Generic,
     CfgString,
@@ -48,37 +34,6 @@ pub(crate) enum UnsupportedLiteralReason {
     DeprecatedKvPair,
 }
 
-fn handle_errors(sess: &ParseSess, span: Span, error: AttrError) {
-    match error {
-        AttrError::MultipleItem(item) => {
-            sess.emit_err(session_diagnostics::MultipleItem { span, item });
-        }
-        AttrError::UnknownMetaItem(item, expected) => {
-            sess.emit_err(session_diagnostics::UnknownMetaItem { span, item, expected });
-        }
-        AttrError::MissingSince => {
-            sess.emit_err(session_diagnostics::MissingSince { span });
-        }
-        AttrError::NonIdentFeature => {
-            sess.emit_err(session_diagnostics::NonIdentFeature { span });
-        }
-        AttrError::MissingFeature => {
-            sess.emit_err(session_diagnostics::MissingFeature { span });
-        }
-        AttrError::MultipleStabilityLevels => {
-            sess.emit_err(session_diagnostics::MultipleStabilityLevels { span });
-        }
-        AttrError::UnsupportedLiteral(reason, is_bytestr) => {
-            sess.emit_err(session_diagnostics::UnsupportedLiteral {
-                span,
-                reason,
-                is_bytestr,
-                start_point_span: sess.source_map().start_point(span),
-            });
-        }
-    }
-}
-
 #[derive(Copy, Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum InlineAttr {
     None,
@@ -162,7 +117,7 @@ pub enum StabilityLevel {
     is_soft: bool,
     /// If part of a feature is stabilized and a new feature is added for the remaining parts,
     /// then the `implied_by` attribute is used to indicate which now-stable feature previously
-    /// contained a item.
+    /// contained an item.
     ///
     /// ```pseudo-Rust
     /// #[unstable(feature = "foo", issue = "...")]
@@ -184,13 +139,24 @@ pub enum StabilityLevel {
     /// `#[stable]`
     Stable {
         /// Rust release which stabilized this feature.
-        since: Symbol,
+        since: StableSince,
         /// Is this item allowed to be referred to on stable, despite being contained in unstable
         /// modules?
         allowed_through_unstable_modules: bool,
     },
 }
 
+/// Rust release in which a feature is stabilized.
+#[derive(Encodable, Decodable, PartialEq, Copy, Clone, Debug, Eq, Hash)]
+#[derive(HashStable_Generic)]
+pub enum StableSince {
+    Version(RustcVersion),
+    /// Stabilized in the upcoming version, whatever number that is.
+    Current,
+    /// Failed to parse a stabilization version.
+    Err,
+}
+
 impl StabilityLevel {
     pub fn is_unstable(&self) -> bool {
         matches!(self, StabilityLevel::Unstable { .. })
@@ -241,7 +207,7 @@ pub fn find_stability(
             sym::rustc_allowed_through_unstable_modules => allowed_through_unstable_modules = true,
             sym::unstable => {
                 if stab.is_some() {
-                    handle_errors(&sess.parse_sess, attr.span, AttrError::MultipleStabilityLevels);
+                    sess.emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span });
                     break;
                 }
@@ -251,7 +217,7 @@ pub fn find_stability(
             sym::stable => {
                 if stab.is_some() {
-                    handle_errors(&sess.parse_sess, attr.span, AttrError::MultipleStabilityLevels);
+                    sess.emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span });
                     break;
                 }
                 if let Some((feature, level)) = parse_stability(sess, attr) {
@@ -295,7 +261,7 @@ pub fn find_const_stability(
             sym::rustc_promotable => promotable = true,
             sym::rustc_const_unstable => {
                 if const_stab.is_some() {
-                    handle_errors(&sess.parse_sess, attr.span, AttrError::MultipleStabilityLevels);
+                    sess.emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span });
                     break;
                 }
@@ -306,7 +272,7 @@ pub fn find_const_stability(
             sym::rustc_const_stable => {
                 if const_stab.is_some() {
-                    handle_errors(&sess.parse_sess, attr.span, AttrError::MultipleStabilityLevels);
+                    sess.emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span });
                     break;
                 }
                 if let Some((feature, level)) = parse_stability(sess, attr) {
@@ -340,7 +306,7 @@ pub fn find_body_stability(
         if attr.has_name(sym::rustc_default_body_unstable) {
             if body_stab.is_some() {
-                handle_errors(&sess.parse_sess, attr.span, AttrError::MultipleStabilityLevels);
+                sess.emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span });
                 break;
             }
@@ -353,83 +319,83 @@ pub fn find_body_stability(
     body_stab
 }
 
+fn insert_or_error(sess: &Session, meta: &MetaItem, item: &mut Option<Symbol>) -> Option<()> {
+    if item.is_some() {
+        sess.emit_err(session_diagnostics::MultipleItem {
+            span: meta.span,
+            item: pprust::path_to_string(&meta.path),
+        });
+        None
+    } else if let Some(v) = meta.value_str() {
+        *item = Some(v);
+        Some(())
+    } else {
+        sess.emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span });
+        None
+    }
+}
+
 /// Read the content of a `stable`/`rustc_const_stable` attribute, and return the feature name and
 /// its stability information.
 fn parse_stability(sess: &Session, attr: &Attribute) -> Option<(Symbol, StabilityLevel)> {
     let meta = attr.meta()?;
     let MetaItem { kind: MetaItemKind::List(ref metas), .. } = meta else { return None };
-    let insert_or_error = |meta: &MetaItem, item: &mut Option<Symbol>| {
-        if item.is_some() {
-            handle_errors(
-                &sess.parse_sess,
-                meta.span,
-                AttrError::MultipleItem(pprust::path_to_string(&meta.path)),
-            );
-            return false;
-        }
-        if let Some(v) = meta.value_str() {
-            *item = Some(v);
-            true
-        } else {
-            sess.emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span });
-            false
-        }
-    };
 
     let mut feature = None;
     let mut since = None;
     for meta in metas {
         let Some(mi) = meta.meta_item() else {
-            handle_errors(
-                &sess.parse_sess,
-                meta.span(),
-                AttrError::UnsupportedLiteral(UnsupportedLiteralReason::Generic, false),
-            );
+            sess.emit_err(session_diagnostics::UnsupportedLiteral {
+                span: meta.span(),
+                reason: UnsupportedLiteralReason::Generic,
+                is_bytestr: false,
+                start_point_span: sess.source_map().start_point(meta.span()),
+            });
             return None;
         };
 
         match mi.name_or_empty() {
-            sym::feature => {
-                if !insert_or_error(mi, &mut feature) {
-                    return None;
-                }
-            }
-            sym::since => {
-                if !insert_or_error(mi, &mut since) {
-                    return None;
-                }
-            }
+            sym::feature => insert_or_error(sess, mi, &mut feature)?,
+            sym::since => insert_or_error(sess, mi, &mut since)?,
             _ => {
-                handle_errors(
-                    &sess.parse_sess,
-                    meta.span(),
-                    AttrError::UnknownMetaItem(
-                        pprust::path_to_string(&mi.path),
-                        &["feature", "since"],
-                    ),
-                );
+                sess.emit_err(session_diagnostics::UnknownMetaItem {
+                    span: meta.span(),
+                    item: pprust::path_to_string(&mi.path),
+                    expected: &["feature", "since"],
+                });
                 return None;
             }
         }
     }
 
-    if let Some(s) = since && s.as_str() == VERSION_PLACEHOLDER {
-        since = Some(rust_version_symbol());
-    }
+    let feature = match feature {
+        Some(feature) if rustc_lexer::is_ident(feature.as_str()) => Ok(feature),
+        Some(_bad_feature) => {
+            Err(sess.emit_err(session_diagnostics::NonIdentFeature { span: attr.span }))
+        }
+        None => Err(sess.emit_err(session_diagnostics::MissingFeature { span: attr.span })),
+    };
 
-    match (feature, since) {
-        (Some(feature), Some(since)) => {
+    let since = if let Some(since) = since {
+        if since.as_str() == VERSION_PLACEHOLDER {
+            StableSince::Current
+        } else if let Some(version) = parse_version(since) {
+            StableSince::Version(version)
+        } else {
+            sess.emit_err(session_diagnostics::InvalidSince { span: attr.span });
+            StableSince::Err
+        }
+    } else {
+        sess.emit_err(session_diagnostics::MissingSince { span: attr.span });
+        StableSince::Err
+    };
+
+    match feature {
+        Ok(feature) => {
             let level = StabilityLevel::Stable { since, allowed_through_unstable_modules: false };
             Some((feature, level))
         }
-        (None, _) => {
-            handle_errors(&sess.parse_sess, attr.span, AttrError::MissingFeature);
-            None
-        }
-        _ => {
-            handle_errors(&sess.parse_sess, attr.span, AttrError::MissingSince);
-            None
-        }
+        Err(ErrorGuaranteed { .. }) => None,
     }
 }
@@ -438,23 +404,6 @@ fn parse_stability(sess: &Session, attr: &Attribute) -> Option<(Symbol, Stabilit
 fn parse_unstability(sess: &Session, attr: &Attribute) -> Option<(Symbol, StabilityLevel)> {
     let meta = attr.meta()?;
     let MetaItem { kind: MetaItemKind::List(ref metas), .. } = meta else { return None };
-    let insert_or_error = |meta: &MetaItem, item: &mut Option<Symbol>| {
-        if item.is_some() {
-            handle_errors(
-                &sess.parse_sess,
-                meta.span,
-                AttrError::MultipleItem(pprust::path_to_string(&meta.path)),
-            );
-            return false;
-        }
-        if let Some(v) = meta.value_str() {
-            *item = Some(v);
-            true
-        } else {
-            sess.emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span });
-            false
-        }
-    };
 
     let mut feature = None;
     let mut reason = None;
@@ -464,29 +413,20 @@ fn parse_unstability(sess: &Session, attr: &Attribute) -> Option<(Symbol, Stabil
     let mut implied_by = None;
     for meta in metas {
         let Some(mi) = meta.meta_item() else {
-            handle_errors(
-                &sess.parse_sess,
-                meta.span(),
-                AttrError::UnsupportedLiteral(UnsupportedLiteralReason::Generic, false),
-            );
+            sess.emit_err(session_diagnostics::UnsupportedLiteral {
+                span: meta.span(),
+                reason: UnsupportedLiteralReason::Generic,
+                is_bytestr: false,
+                start_point_span: sess.source_map().start_point(meta.span()),
+            });
             return None;
         };
 
         match mi.name_or_empty() {
-            sym::feature => {
-                if !insert_or_error(mi, &mut feature) {
-                    return None;
-                }
-            }
-            sym::reason => {
-                if !insert_or_error(mi, &mut reason) {
-                    return None;
-                }
-            }
+            sym::feature => insert_or_error(sess, mi, &mut feature)?,
+            sym::reason => insert_or_error(sess, mi, &mut reason)?,
             sym::issue => {
-                if !insert_or_error(mi, &mut issue) {
-                    return None;
-                }
+                insert_or_error(sess, mi, &mut issue)?;
 
                 // These unwraps are safe because `insert_or_error` ensures the meta item
                 // is a name/value pair string literal.
@@ -515,31 +455,31 @@ fn parse_unstability(sess: &Session, attr: &Attribute) -> Option<(Symbol, Stabil
                 is_soft = true;
             }
-            sym::implied_by => {
-                if !insert_or_error(mi, &mut implied_by) {
-                    return None;
-                }
-            }
+            sym::implied_by => insert_or_error(sess, mi, &mut implied_by)?,
             _ => {
-                handle_errors(
-                    &sess.parse_sess,
-                    meta.span(),
-                    AttrError::UnknownMetaItem(
-                        pprust::path_to_string(&mi.path),
-                        &["feature", "reason", "issue", "soft", "implied_by"],
-                    ),
-                );
+                sess.emit_err(session_diagnostics::UnknownMetaItem {
+                    span: meta.span(),
+                    item: pprust::path_to_string(&mi.path),
+                    expected: &["feature", "reason", "issue", "soft", "implied_by"],
+                });
                 return None;
             }
         }
     }
 
-    match (feature, reason, issue) {
-        (Some(feature), reason, Some(_)) => {
-            if !rustc_lexer::is_ident(feature.as_str()) {
-                handle_errors(&sess.parse_sess, attr.span, AttrError::NonIdentFeature);
-                return None;
-            }
+    let feature = match feature {
+        Some(feature) if rustc_lexer::is_ident(feature.as_str()) => Ok(feature),
+        Some(_bad_feature) => {
+            Err(sess.emit_err(session_diagnostics::NonIdentFeature { span: attr.span }))
+        }
+        None => Err(sess.emit_err(session_diagnostics::MissingFeature { span: attr.span })),
+    };
+
+    let issue =
+        issue.ok_or_else(|| sess.emit_err(session_diagnostics::MissingIssue { span: attr.span }));
+
+    match (feature, issue) {
+        (Ok(feature), Ok(_)) => {
             let level = StabilityLevel::Unstable {
                 reason: UnstableReason::from_opt_reason(reason),
                 issue: issue_num,
@@ -548,14 +488,7 @@ fn parse_unstability(sess: &Session, attr: &Attribute) -> Option<(Symbol, Stabil
             };
             Some((feature, level))
         }
-        (None, _, _) => {
-            handle_errors(&sess.parse_sess, attr.span, AttrError::MissingFeature);
-            return None;
-        }
-        _ => {
-            sess.emit_err(session_diagnostics::MissingIssue { span: attr.span });
-            return None;
-        }
+        (Err(ErrorGuaranteed { .. }), _) | (_, Err(ErrorGuaranteed { .. })) => None,
     }
 }
@@ -587,7 +520,11 @@ pub fn cfg_matches(
                     UNEXPECTED_CFGS,
                     cfg.span,
                     lint_node_id,
-                    "unexpected `cfg` condition value",
+                    if let Some(value) = cfg.value {
+                        format!("unexpected `cfg` condition value: `{value}`")
+                    } else {
+                        format!("unexpected `cfg` condition value: (none)")
+                    },
                     BuiltinLintDiagnostics::UnexpectedCfgValue(
                         (cfg.name, cfg.name_span),
                         cfg.value.map(|v| (v, cfg.value_span.unwrap())),
@@ -599,7 +536,7 @@ pub fn cfg_matches(
                     UNEXPECTED_CFGS,
                     cfg.span,
                     lint_node_id,
-                    "unexpected `cfg` condition name",
+                    format!("unexpected `cfg` condition name: `{}`", cfg.name),
                     BuiltinLintDiagnostics::UnexpectedCfgName(
                         (cfg.name, cfg.name_span),
                         cfg.value.map(|v| (v, cfg.value_span.unwrap())),
@@ -627,24 +564,20 @@ fn gate_cfg(gated_cfg: &GatedCfg, cfg_span: Span, sess: &ParseSess, features: &F
     }
 }
 
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
-struct Version {
-    major: u16,
-    minor: u16,
-    patch: u16,
-}
-
-fn parse_version(s: &str, allow_appendix: bool) -> Option<Version> {
-    let mut components = s.split('-');
+/// Parse a rustc version number written inside string literal in an attribute,
+/// like appears in `since = "1.0.0"`. Suffixes like "-dev" and "-nightly" are
+/// not accepted in this position, unlike when parsing CFG_RELEASE.
+fn parse_version(s: Symbol) -> Option<RustcVersion> {
+    let mut components = s.as_str().split('-');
     let d = components.next()?;
-    if !allow_appendix && components.next().is_some() {
+    if components.next().is_some() {
         return None;
     }
     let mut digits = d.splitn(3, '.');
     let major = digits.next()?.parse().ok()?;
     let minor = digits.next()?.parse().ok()?;
     let patch = digits.next().unwrap_or("0").parse().ok()?;
-    Some(Version { major, minor, patch })
+    Some(RustcVersion { major, minor, patch })
 }
 
 /// Evaluate a cfg-like condition (with `any` and `all`), using `eval` to
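For illustration, here is a self-contained sketch of the same `since = "1.31.0"` parsing scheme; the `(u16, u16, u16)` tuple stands in for rustc's `RustcVersion` struct and is an assumption of this sketch only.

```rust
// Simplified standalone version of the `parse_version` logic above: split off
// any "-suffix", then read up to three dot-separated numeric components.
fn parse_version(s: &str) -> Option<(u16, u16, u16)> {
    let mut components = s.split('-');
    let d = components.next()?;
    // Suffixes such as "-nightly" are rejected in this position.
    if components.next().is_some() {
        return None;
    }
    let mut digits = d.splitn(3, '.');
    let major = digits.next()?.parse().ok()?;
    let minor = digits.next()?.parse().ok()?;
    let patch = digits.next().unwrap_or("0").parse().ok()?;
    Some((major, minor, patch))
}

fn main() {
    assert_eq!(parse_version("1.31.0"), Some((1, 31, 0)));
    assert_eq!(parse_version("1.31"), Some((1, 31, 0))); // missing patch defaults to 0
    assert_eq!(parse_version("1.31.0-nightly"), None);   // suffixes are rejected
}
```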
@@ -676,27 +609,27 @@ pub fn eval_condition(
                     return false;
                 }
             };
-            let Some(min_version) = parse_version(min_version.as_str(), false) else {
+            let Some(min_version) = parse_version(*min_version) else {
                 sess.emit_warning(session_diagnostics::UnknownVersionLiteral { span: *span });
                 return false;
             };
-            let rustc_version = parse_version(env!("CFG_RELEASE"), true).unwrap();
 
             // See https://github.com/rust-lang/rust/issues/64796#issuecomment-640851454 for details
             if sess.assume_incomplete_release {
-                rustc_version > min_version
+                RustcVersion::CURRENT > min_version
             } else {
-                rustc_version >= min_version
+                RustcVersion::CURRENT >= min_version
             }
         }
         ast::MetaItemKind::List(mis) => {
             for mi in mis.iter() {
                 if !mi.is_meta_item() {
-                    handle_errors(
-                        sess,
-                        mi.span(),
-                        AttrError::UnsupportedLiteral(UnsupportedLiteralReason::Generic, false),
-                    );
+                    sess.emit_err(session_diagnostics::UnsupportedLiteral {
+                        span: mi.span(),
+                        reason: UnsupportedLiteralReason::Generic,
+                        is_bytestr: false,
+                        start_point_span: sess.source_map().start_point(mi.span()),
+                    });
                     return false;
                 }
             }
@@ -729,13 +662,16 @@ pub fn eval_condition(
                 !eval_condition(mis[0].meta_item().unwrap(), sess, features, eval)
             }
             sym::target => {
-                if let Some(features) = features && !features.cfg_target_compact {
+                if let Some(features) = features
+                    && !features.cfg_target_compact
+                {
                     feature_err(
                         sess,
                         sym::cfg_target_compact,
                         cfg.span,
-                        "compact `cfg(target(..))` is experimental and subject to change"
-                    ).emit();
+                        "compact `cfg(target(..))` is experimental and subject to change",
+                    )
+                    .emit();
                 }
 
                 mis.iter().fold(true, |res, mi| {
@@ -761,14 +697,12 @@ pub fn eval_condition(
                 true
             }
             MetaItemKind::NameValue(lit) if !lit.kind.is_str() => {
-                handle_errors(
-                    sess,
-                    lit.span,
-                    AttrError::UnsupportedLiteral(
-                        UnsupportedLiteralReason::CfgString,
-                        lit.kind.is_bytestr(),
-                    ),
-                );
+                sess.emit_err(session_diagnostics::UnsupportedLiteral {
+                    span: lit.span,
+                    reason: UnsupportedLiteralReason::CfgString,
+                    is_bytestr: lit.kind.is_bytestr(),
+                    start_point_span: sess.source_map().start_point(lit.span),
+                });
                 true
             }
             ast::MetaItemKind::Word | ast::MetaItemKind::NameValue(..) => {
@@ -786,17 +720,49 @@ pub fn eval_condition(
 #[derive(Copy, Debug, Encodable, Decodable, Clone, HashStable_Generic)]
 pub struct Deprecation {
-    pub since: Option<Symbol>,
+    pub since: DeprecatedSince,
     /// The note to issue a reason.
     pub note: Option<Symbol>,
     /// A text snippet used to completely replace any use of the deprecated item in an expression.
     ///
     /// This is currently unstable.
     pub suggestion: Option<Symbol>,
+}
 
-    /// Whether to treat the since attribute as being a Rust version identifier
-    /// (rather than an opaque string).
-    pub is_since_rustc_version: bool,
+/// Release in which an API is deprecated.
+#[derive(Copy, Debug, Encodable, Decodable, Clone, HashStable_Generic)]
+pub enum DeprecatedSince {
+    RustcVersion(RustcVersion),
+    /// Deprecated in the future ("to be determined").
+    Future,
+    /// `feature(staged_api)` is off. Deprecation versions outside the standard
+    /// library are allowed to be arbitrary strings, for better or worse.
+    NonStandard(Symbol),
+    /// Deprecation version is unspecified but optional.
+    Unspecified,
+    /// Failed to parse a deprecation version, or the deprecation version is
+    /// unspecified and required. An error has already been emitted.
+    Err,
+}
+
+impl Deprecation {
+    /// Whether an item marked with #[deprecated(since = "X")] is currently
+    /// deprecated (i.e., whether X is not greater than the current rustc
+    /// version).
+    pub fn is_in_effect(&self) -> bool {
+        match self.since {
+            DeprecatedSince::RustcVersion(since) => since <= RustcVersion::CURRENT,
+            DeprecatedSince::Future => false,
+            // The `since` field doesn't have semantic purpose without `#![staged_api]`.
+            DeprecatedSince::NonStandard(_) => true,
+            // Assume deprecation is in effect if "since" field is absent or invalid.
+            DeprecatedSince::Unspecified | DeprecatedSince::Err => true,
+        }
+    }
+
+    pub fn is_since_rustc_version(&self) -> bool {
+        matches!(self.since, DeprecatedSince::RustcVersion(_))
+    }
 }
 
 /// Finds the deprecation attribute. `None` if none exists.
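A standalone sketch of how the `is_in_effect` logic above treats each `since` variant; the `Version` type and the `CURRENT` constant are placeholders assumed for this example, not rustc's real `RustcVersion` definitions.

```rust
// Hypothetical stand-ins for RustcVersion and RustcVersion::CURRENT.
#[derive(PartialEq, PartialOrd)]
struct Version(u16, u16, u16);

const CURRENT: Version = Version(1, 75, 0);

#[allow(dead_code)]
enum DeprecatedSince {
    RustcVersion(Version),
    Future,
    NonStandard(&'static str),
    Unspecified,
    Err,
}

fn is_in_effect(since: &DeprecatedSince) -> bool {
    match since {
        // Only in effect once the stated release is no newer than the compiler.
        DeprecatedSince::RustcVersion(v) => *v <= CURRENT,
        DeprecatedSince::Future => false,
        // Without staged_api the string carries no version meaning, so treat as in effect.
        DeprecatedSince::NonStandard(_) => true,
        // Absent or unparsable versions are treated as already deprecated.
        DeprecatedSince::Unspecified | DeprecatedSince::Err => true,
    }
}

fn main() {
    assert!(is_in_effect(&DeprecatedSince::RustcVersion(Version(1, 60, 0))));
    assert!(!is_in_effect(&DeprecatedSince::RustcVersion(Version(1, 99, 0))));
    assert!(!is_in_effect(&DeprecatedSince::Future));
}
```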
@@ -825,11 +791,10 @@ pub fn find_deprecation(
             MetaItemKind::List(list) => {
                 let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                     if item.is_some() {
-                        handle_errors(
-                            &sess.parse_sess,
-                            meta.span,
-                            AttrError::MultipleItem(pprust::path_to_string(&meta.path)),
-                        );
+                        sess.emit_err(session_diagnostics::MultipleItem {
+                            span: meta.span,
+                            item: pprust::path_to_string(&meta.path),
+                        });
                         return false;
                     }
                     if let Some(v) = meta.value_str() {
@@ -837,16 +802,14 @@ pub fn find_deprecation(
                         true
                     } else {
                         if let Some(lit) = meta.name_value_literal() {
-                            handle_errors(
-                                &sess.parse_sess,
-                                lit.span,
-                                AttrError::UnsupportedLiteral(
-                                    UnsupportedLiteralReason::DeprecatedString,
-                                    lit.kind.is_bytestr(),
-                                ),
-                            );
+                            sess.emit_err(session_diagnostics::UnsupportedLiteral {
+                                span: lit.span,
+                                reason: UnsupportedLiteralReason::DeprecatedString,
+                                is_bytestr: lit.kind.is_bytestr(),
+                                start_point_span: sess.source_map().start_point(lit.span),
+                            });
                         } else {
-                            sess.emit_err(session_diagnostics::IncorrectMetaItem2 {
+                            sess.emit_err(session_diagnostics::IncorrectMetaItem {
                                 span: meta.span,
                             });
                         }
@@ -882,30 +845,25 @@ pub fn find_deprecation(
                         _ => {
-                            handle_errors(
-                                &sess.parse_sess,
-                                meta.span(),
-                                AttrError::UnknownMetaItem(
-                                    pprust::path_to_string(&mi.path),
-                                    if features.deprecated_suggestion {
-                                        &["since", "note", "suggestion"]
-                                    } else {
-                                        &["since", "note"]
-                                    },
-                                ),
-                            );
+                            sess.emit_err(session_diagnostics::UnknownMetaItem {
+                                span: meta.span(),
+                                item: pprust::path_to_string(&mi.path),
+                                expected: if features.deprecated_suggestion {
+                                    &["since", "note", "suggestion"]
+                                } else {
+                                    &["since", "note"]
+                                },
+                            });
                             continue 'outer;
                         }
                     },
                     NestedMetaItem::Lit(lit) => {
-                        handle_errors(
-                            &sess.parse_sess,
-                            lit.span,
-                            AttrError::UnsupportedLiteral(
-                                UnsupportedLiteralReason::DeprecatedKvPair,
-                                false,
-                            ),
-                        );
+                        sess.emit_err(session_diagnostics::UnsupportedLiteral {
+                            span: lit.span,
+                            reason: UnsupportedLiteralReason::DeprecatedKvPair,
+                            is_bytestr: false,
+                            start_point_span: sess.source_map().start_point(lit.span),
+                        });
                         continue 'outer;
                     }
                 }
@@ -913,22 +871,30 @@ pub fn find_deprecation(
             }
         }
 
-        if is_rustc {
-            if since.is_none() {
-                handle_errors(&sess.parse_sess, attr.span, AttrError::MissingSince);
-                continue;
+        let since = if let Some(since) = since {
+            if since.as_str() == "TBD" {
+                DeprecatedSince::Future
+            } else if !is_rustc {
+                DeprecatedSince::NonStandard(since)
+            } else if let Some(version) = parse_version(since) {
+                DeprecatedSince::RustcVersion(version)
+            } else {
+                sess.emit_err(session_diagnostics::InvalidSince { span: attr.span });
+                DeprecatedSince::Err
             }
+        } else if is_rustc {
+            sess.emit_err(session_diagnostics::MissingSince { span: attr.span });
+            DeprecatedSince::Err
+        } else {
+            DeprecatedSince::Unspecified
+        };
 
-            if note.is_none() {
-                sess.emit_err(session_diagnostics::MissingNote { span: attr.span });
-                continue;
-            }
-        }
+        if is_rustc && note.is_none() {
+            sess.emit_err(session_diagnostics::MissingNote { span: attr.span });
+            continue;
+        }
 
-        depr = Some((
-            Deprecation { since, note, suggestion, is_since_rustc_version: is_rustc },
-            attr.span,
-        ));
+        depr = Some((Deprecation { since, note, suggestion }, attr.span));
     }
 
     depr
@@ -4,6 +4,9 @@
 //! The goal is to move the definition of `MetaItem` and things that don't need to be in `syntax`
 //! to this crate.

+#![cfg_attr(not(bootstrap), allow(internal_features))]
+#![cfg_attr(not(bootstrap), feature(rustdoc_internals))]
+#![cfg_attr(not(bootstrap), doc(rust_logo))]
 #![feature(let_chains)]
 #![deny(rustc::untranslatable_diagnostic)]
 #![deny(rustc::diagnostic_outside_of_impl)]
@@ -24,6 +27,6 @@ pub use StabilityLevel::*;

 pub use rustc_ast::attr::*;

-pub(crate) use rustc_ast::HashStableContext;
+pub(crate) use rustc_session::HashStableContext;

 fluent_messages! { "../messages.ftl" }
@@ -165,15 +165,6 @@ pub(crate) struct MissingIssue {
     pub span: Span,
 }

-// FIXME: This diagnostic is identical to `IncorrectMetaItem`, barring the error code. Consider
-// changing this to `IncorrectMetaItem`. See #51489.
-#[derive(Diagnostic)]
-#[diag(attr_incorrect_meta_item, code = "E0551")]
-pub(crate) struct IncorrectMetaItem2 {
-    #[primary_span]
-    pub span: Span,
-}
-
 // FIXME: Why is this the same error code as `InvalidReprHintNoParen` and `InvalidReprHintNoValue`?
 // It is more similar to `IncorrectReprFormatGeneric`.
 #[derive(Diagnostic)]
@@ -379,6 +370,13 @@ pub(crate) struct ExpectsFeatures {
     pub name: String,
 }

+#[derive(Diagnostic)]
+#[diag(attr_invalid_since)]
+pub(crate) struct InvalidSince {
+    #[primary_span]
+    pub span: Span,
+}
+
 #[derive(Diagnostic)]
 #[diag(attr_soft_no_args)]
 pub(crate) struct SoftNoArgs {
@@ -4,11 +4,16 @@ version = "0.0.0"
 edition = "2021"

 [dependencies]
+# tidy-alphabetical-start
 icu_list = "1.2"
 icu_locid = "1.2"
+icu_locid_transform = "1.3.2"
 icu_provider = "1.2"
 icu_provider_adapters = "1.2"
-zerovec = "0.9.4"
+zerovec = "0.10.0"
+# tidy-alphabetical-end

 [features]
+# tidy-alphabetical-start
 rustc_use_parallel_compiler = ['icu_provider/sync']
+# tidy-alphabetical-end
@@ -1,6 +0,0 @@
-// @generated
-type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: LocaleFallbackLikelySubtagsV1Marker as :: icu_provider :: DataMarker > :: Yokeable ;
-pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> {
-    locale.is_empty().then(|| &UND)
-}
-static UND: DataStruct = include!("und.rs.data");
@ -1,66 +0,0 @@
|
|||||||
::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1 {
|
|
||||||
l2s: unsafe {
|
|
||||||
#[allow(unused_unsafe)]
|
|
||||||
::zerovec::ZeroMap::from_parts_unchecked(
|
|
||||||
unsafe {
|
|
||||||
:: zerovec :: ZeroVec :: from_bytes_unchecked (b"am\0ar\0as\0be\0bg\0bgcbhobn\0brxchrcv\0doiel\0fa\0gu\0he\0hi\0hy\0ja\0ka\0kk\0km\0kn\0ko\0kokks\0ky\0lo\0maimk\0ml\0mn\0mnimr\0my\0ne\0or\0pa\0ps\0rajru\0sa\0satsd\0si\0sr\0ta\0te\0tg\0th\0ti\0tt\0uk\0ur\0yuezh\0")
|
|
||||||
},
|
|
||||||
unsafe {
|
|
||||||
:: zerovec :: ZeroVec :: from_bytes_unchecked (b"EthiArabBengCyrlCyrlDevaDevaBengDevaCherCyrlDevaGrekArabGujrHebrDevaArmnJpanGeorCyrlKhmrKndaKoreDevaArabCyrlLaooDevaCyrlMlymCyrlBengDevaMymrDevaOryaGuruArabDevaCyrlDevaOlckArabSinhCyrlTamlTeluCyrlThaiEthiCyrlCyrlArabHantHans")
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
lr2s: unsafe {
|
|
||||||
#[allow(unused_unsafe)]
|
|
||||||
::zerovec::ZeroMap2d::from_parts_unchecked(
|
|
||||||
unsafe {
|
|
||||||
::zerovec::ZeroVec::from_bytes_unchecked(
|
|
||||||
b"az\0ha\0kk\0ky\0mn\0ms\0pa\0sd\0sr\0tg\0uz\0yuezh\0",
|
|
||||||
)
|
|
||||||
},
|
|
||||||
unsafe {
|
|
||||||
:: zerovec :: ZeroVec :: from_bytes_unchecked (b"\x03\0\0\0\x05\0\0\0\t\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x0F\0\0\0\x13\0\0\0\x14\0\0\0\x16\0\0\0\x17\0\0\0&\0\0\0")
|
|
||||||
},
|
|
||||||
unsafe {
|
|
||||||
:: zerovec :: ZeroVec :: from_bytes_unchecked (b"IQ\0IR\0RU\0CM\0SD\0AF\0CN\0IR\0MN\0CN\0TR\0CN\0CC\0PK\0IN\0ME\0RO\0RU\0TR\0PK\0AF\0CN\0CN\0AU\0BN\0GB\0GF\0HK\0ID\0MO\0PA\0PF\0PH\0SR\0TH\0TW\0US\0VN\0")
|
|
||||||
},
|
|
||||||
unsafe {
|
|
||||||
:: zerovec :: ZeroVec :: from_bytes_unchecked (b"ArabArabCyrlArabArabArabArabArabArabArabLatnMongArabArabDevaLatnLatnLatnLatnArabArabCyrlHansHantHantHantHantHantHantHantHantHantHantHantHantHantHantHant")
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
l2r: unsafe {
|
|
||||||
#[allow(unused_unsafe)]
|
|
||||||
::zerovec::ZeroMap::from_parts_unchecked(
|
|
||||||
unsafe {
|
|
||||||
:: zerovec :: ZeroVec :: from_bytes_unchecked (b"af\0am\0ar\0as\0astaz\0be\0bg\0bgcbhobn\0br\0brxbs\0ca\0cebchrcs\0cv\0cy\0da\0de\0doidsbel\0en\0es\0et\0eu\0fa\0ff\0fi\0filfo\0fr\0ga\0gd\0gl\0gu\0ha\0he\0hi\0hr\0hsbhu\0hy\0ia\0id\0ig\0is\0it\0ja\0jv\0ka\0keakgpkk\0km\0kn\0ko\0kokks\0ky\0lo\0lt\0lv\0maimi\0mk\0ml\0mn\0mnimr\0ms\0my\0ne\0nl\0nn\0no\0or\0pa\0pcmpl\0ps\0pt\0qu\0rajrm\0ro\0ru\0sa\0satsc\0sd\0si\0sk\0sl\0so\0sq\0sr\0su\0sv\0sw\0ta\0te\0tg\0th\0ti\0tk\0to\0tr\0tt\0uk\0ur\0uz\0vi\0wo\0xh\0yo\0yrlyuezh\0zu\0")
|
|
||||||
},
|
|
||||||
unsafe {
|
|
||||||
:: zerovec :: ZeroVec :: from_bytes_unchecked (b"ZA\0ET\0EG\0IN\0ES\0AZ\0BY\0BG\0IN\0IN\0BD\0FR\0IN\0BA\0ES\0PH\0US\0CZ\0RU\0GB\0DK\0DE\0IN\0DE\0GR\0US\0ES\0EE\0ES\0IR\0SN\0FI\0PH\0FO\0FR\0IE\0GB\0ES\0IN\0NG\0IL\0IN\0HR\0DE\0HU\0AM\x00001ID\0NG\0IS\0IT\0JP\0ID\0GE\0CV\0BR\0KZ\0KH\0IN\0KR\0IN\0IN\0KG\0LA\0LT\0LV\0IN\0NZ\0MK\0IN\0MN\0IN\0IN\0MY\0MM\0NP\0NL\0NO\0NO\0IN\0IN\0NG\0PL\0AF\0BR\0PE\0IN\0CH\0RO\0RU\0IN\0IN\0IT\0PK\0LK\0SK\0SI\0SO\0AL\0RS\0ID\0SE\0TZ\0IN\0IN\0TJ\0TH\0ET\0TM\0TO\0TR\0RU\0UA\0PK\0UZ\0VN\0SN\0ZA\0NG\0BR\0HK\0CN\0ZA\0")
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
ls2r: unsafe {
|
|
||||||
#[allow(unused_unsafe)]
|
|
||||||
::zerovec::ZeroMap2d::from_parts_unchecked(
|
|
||||||
unsafe {
|
|
||||||
::zerovec::ZeroVec::from_bytes_unchecked(
|
|
||||||
b"az\0en\0ff\0kk\0ky\0mn\0pa\0sd\0tg\0uz\0yuezh\0",
|
|
||||||
)
|
|
||||||
},
|
|
||||||
unsafe {
|
|
||||||
:: zerovec :: ZeroVec :: from_bytes_unchecked (b"\x01\0\0\0\x02\0\0\0\x03\0\0\0\x04\0\0\0\x06\0\0\0\x07\0\0\0\x08\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x11\0\0\0")
|
|
||||||
},
|
|
||||||
unsafe {
|
|
||||||
::zerovec::ZeroVec::from_bytes_unchecked(
|
|
||||||
b"ArabShawAdlmArabArabLatnMongArabDevaKhojSindArabArabHansBopoHanbHant",
|
|
||||||
)
|
|
||||||
},
|
|
||||||
unsafe {
|
|
||||||
::zerovec::ZeroVec::from_bytes_unchecked(
|
|
||||||
b"IR\0GB\0GN\0CN\0CN\0TR\0CN\0PK\0IN\0IN\0IN\0PK\0AF\0CN\0TW\0TW\0TW\0",
|
|
||||||
)
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,4 +0,0 @@
-// @generated
-pub mod likelysubtags_v1;
-pub mod parents_v1;
-pub mod supplement;
@@ -1,6 +0,0 @@
-// @generated
-type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: LocaleFallbackParentsV1Marker as :: icu_provider :: DataMarker > :: Yokeable ;
-pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> {
-    locale.is_empty().then(|| &UND)
-}
-static UND: DataStruct = include!("und.rs.data");
@ -1,13 +0,0 @@
|
|||||||
::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1 {
|
|
||||||
parents: unsafe {
|
|
||||||
#[allow(unused_unsafe)]
|
|
||||||
::zerovec::ZeroMap::from_parts_unchecked(
|
|
||||||
unsafe {
|
|
||||||
:: zerovec :: VarZeroVec :: from_bytes_unchecked (b"\x84\0\0\0\0\0\x06\0\x0B\0\x10\0\x15\0\x1A\0\x1F\0$\0)\0.\x003\08\0=\0B\0G\0L\0Q\0V\0[\0`\0e\0j\0o\0t\0y\0~\0\x83\0\x88\0\x8D\0\x92\0\x97\0\x9C\0\xA1\0\xA6\0\xAB\0\xB0\0\xB5\0\xBA\0\xBF\0\xC4\0\xC9\0\xCE\0\xD3\0\xD8\0\xDD\0\xE2\0\xE7\0\xEC\0\xF1\0\xF6\0\xFB\0\0\x01\x05\x01\n\x01\x0F\x01\x14\x01\x19\x01\x1E\x01#\x01(\x01-\x012\x017\x01<\x01A\x01F\x01K\x01P\x01U\x01Z\x01_\x01d\x01i\x01n\x01s\x01x\x01}\x01\x82\x01\x87\x01\x8C\x01\x91\x01\x96\x01\x9B\x01\xA0\x01\xA5\x01\xAA\x01\xAF\x01\xB4\x01\xB9\x01\xBE\x01\xC3\x01\xC8\x01\xCD\x01\xD2\x01\xD7\x01\xDC\x01\xE1\x01\xE6\x01\xEB\x01\xF0\x01\xF5\x01\xFA\x01\xFF\x01\x04\x02\t\x02\x0E\x02\x13\x02\x18\x02\x1D\x02\"\x02'\x02,\x021\x026\x02;\x02@\x02G\x02I\x02K\x02M\x02R\x02W\x02\\\x02a\x02f\x02k\x02p\x02u\x02z\x02\x7F\x02\x84\x02\x89\x02en-150en-AGen-AIen-ATen-AUen-BBen-BEen-BMen-BSen-BWen-BZen-CCen-CHen-CKen-CMen-CXen-CYen-DEen-DGen-DKen-DMen-ERen-FIen-FJen-FKen-FMen-GBen-GDen-GGen-GHen-GIen-GMen-GYen-HKen-IEen-ILen-IMen-INen-IOen-JEen-JMen-KEen-KIen-KNen-KYen-LCen-LRen-LSen-MGen-MOen-MSen-MTen-MUen-MVen-MWen-MYen-NAen-NFen-NGen-NLen-NRen-NUen-NZen-PGen-PKen-PNen-PWen-RWen-SBen-SCen-SDen-SEen-SGen-SHen-SIen-SLen-SSen-SXen-SZen-TCen-TKen-TOen-TTen-TVen-TZen-UGen-VCen-VGen-VUen-WSen-ZAen-ZMen-ZWes-ARes-BOes-BRes-BZes-CLes-COes-CRes-CUes-DOes-ECes-GTes-HNes-MXes-NIes-PAes-PEes-PRes-PYes-SVes-USes-UYes-VEhi-Latnhtnbnnno-NOpt-AOpt-CHpt-CVpt-FRpt-GQpt-GWpt-LUpt-MOpt-MZpt-STpt-TLzh-Hant-MO")
|
|
||||||
},
|
|
||||||
unsafe {
|
|
||||||
:: zerovec :: ZeroVec :: from_bytes_unchecked (b"en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419en\0\0\0\0\0\0\x01IN\0fr\0\0\0\0\0\0\x01HT\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0zh\0\x01Hant\x01HK\0")
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,6 +0,0 @@
-// @generated
-type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: CollationFallbackSupplementV1Marker as :: icu_provider :: DataMarker > :: Yokeable ;
-pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> {
-    locale.is_empty().then(|| &UND)
-}
-static UND: DataStruct = include!("und.rs.data");
@@ -1,22 +0,0 @@
-::icu_provider_adapters::fallback::provider::LocaleFallbackSupplementV1 {
-    parents: unsafe {
-        #[allow(unused_unsafe)]
-        ::zerovec::ZeroMap::from_parts_unchecked(
-            unsafe { ::zerovec::VarZeroVec::from_bytes_unchecked(b"\x01\0\0\0\0\0yue") },
-            unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(b"zh\0\x01Hant\0\0\0\0") },
-        )
-    },
-    unicode_extension_defaults: unsafe {
-        #[allow(unused_unsafe)]
-        ::zerovec::ZeroMap2d::from_parts_unchecked(
-            unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(b"co") },
-            unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(b"\x02\0\0\0") },
-            unsafe {
-                ::zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x02\0zhzh-Hant")
-            },
-            unsafe {
-                ::zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x06\0pinyinstroke")
-            },
-        )
-    },
-}
@@ -1,2 +0,0 @@
-// @generated
-pub mod co_v1;
@ -1,50 +0,0 @@
|
|||||||
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", and ", 6u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", & ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" & ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
])
|
|
||||||
@ -1,116 +0,0 @@
|
|||||||
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8),
|
|
||||||
special_case: Some(::icu_list::provider::SpecialCasePattern {
|
|
||||||
condition: unsafe {
|
|
||||||
::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(
|
|
||||||
if cfg!(target_endian = "little") {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0"
|
|
||||||
} else {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#"
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8),
|
|
||||||
special_case: Some(::icu_list::provider::SpecialCasePattern {
|
|
||||||
condition: unsafe {
|
|
||||||
::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(
|
|
||||||
if cfg!(target_endian = "little") {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0"
|
|
||||||
} else {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#"
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8),
|
|
||||||
special_case: Some(::icu_list::provider::SpecialCasePattern {
|
|
||||||
condition: unsafe {
|
|
||||||
::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(
|
|
||||||
if cfg!(target_endian = "little") {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0"
|
|
||||||
} else {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#"
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8),
|
|
||||||
special_case: Some(::icu_list::provider::SpecialCasePattern {
|
|
||||||
condition: unsafe {
|
|
||||||
::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(
|
|
||||||
if cfg!(target_endian = "little") {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0"
|
|
||||||
} else {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#"
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8),
|
|
||||||
special_case: Some(::icu_list::provider::SpecialCasePattern {
|
|
||||||
condition: unsafe {
|
|
||||||
::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(
|
|
||||||
if cfg!(target_endian = "little") {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0"
|
|
||||||
} else {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#"
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8),
|
|
||||||
special_case: Some(::icu_list::provider::SpecialCasePattern {
|
|
||||||
condition: unsafe {
|
|
||||||
::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(
|
|
||||||
if cfg!(target_endian = "little") {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0"
|
|
||||||
} else {
|
|
||||||
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#"
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
])
|
|
||||||
@ -1,50 +0,0 @@
|
|||||||
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
])
|
|
||||||
@ -1,50 +0,0 @@
|
|||||||
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
])
|
|
||||||
@ -1,50 +0,0 @@
|
|||||||
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
])
|
|
||||||
@@ -1,22 +0,0 @@
-// @generated
-type DataStruct = <::icu_list::provider::AndListV1Marker as ::icu_provider::DataMarker>::Yokeable;
-pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> {
-    static KEYS: [&str; 12usize] =
-        ["en", "es", "fr", "it", "ja", "pt", "ru", "tr", "und", "zh", "zh-Hans", "zh-Hant"];
-    static DATA: [&DataStruct; 12usize] =
-        [&EN, &ES, &FR, &IT, &JA, &PT, &RU, &TR, &UND, &ZH, &ZH, &ZH_HANT];
-    KEYS.binary_search_by(|k| locale.strict_cmp(k.as_bytes()).reverse())
-        .ok()
-        .map(|i| unsafe { *DATA.get_unchecked(i) })
-}
-static EN: DataStruct = include!("en.rs.data");
-static ES: DataStruct = include!("es.rs.data");
-static FR: DataStruct = include!("fr.rs.data");
-static IT: DataStruct = include!("it.rs.data");
-static JA: DataStruct = include!("ja.rs.data");
-static PT: DataStruct = include!("pt.rs.data");
-static RU: DataStruct = include!("ru.rs.data");
-static TR: DataStruct = include!("tr.rs.data");
-static UND: DataStruct = include!("und.rs.data");
-static ZH_HANT: DataStruct = include!("zh-Hant.rs.data");
-static ZH: DataStruct = include!("zh.rs.data");
@ -1,50 +0,0 @@
|
|||||||
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
])
|
|
||||||
@ -1,50 +0,0 @@
|
|||||||
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
])
|
|
||||||
@ -1,50 +0,0 @@
|
|||||||
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
])
@ -1,50 +0,0 @@
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
])
@ -1,50 +0,0 @@
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
])
@ -1,50 +0,0 @@
::icu_list::provider::ListFormatterPatternsV1([
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
::icu_list::provider::ConditionalListJoinerPattern {
|
|
||||||
default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8),
|
|
||||||
special_case: None,
|
|
||||||
},
|
|
||||||
])
@ -1,2 +0,0 @@
// @generated
pub mod and_v1;
46
compiler/rustc_baked_icu_data/src/data/macros.rs
Normal file
46
compiler/rustc_baked_icu_data/src/data/macros.rs
Normal file
@ -0,0 +1,46 @@
// @generated
|
||||||
|
/// Marks a type as a data provider. You can then use macros like
|
||||||
|
/// `impl_core_helloworld_v1` to add implementations.
|
||||||
|
///
|
||||||
|
/// ```ignore
|
||||||
|
/// struct MyProvider;
|
||||||
|
/// const _: () = {
|
||||||
|
/// include!("path/to/generated/macros.rs");
|
||||||
|
/// make_provider!(MyProvider);
|
||||||
|
/// impl_core_helloworld_v1!(MyProvider);
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! __make_provider {
|
||||||
|
($ name : ty) => {
|
||||||
|
#[clippy::msrv = "1.66"]
|
||||||
|
impl $name {
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub const MUST_USE_MAKE_PROVIDER_MACRO: () = ();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
#[doc(inline)]
|
||||||
|
pub use __make_provider as make_provider;
|
||||||
|
#[macro_use]
|
||||||
|
#[path = "macros/fallback_likelysubtags_v1.data.rs"]
|
||||||
|
mod fallback_likelysubtags_v1;
|
||||||
|
#[doc(inline)]
|
||||||
|
pub use __impl_fallback_likelysubtags_v1 as impl_fallback_likelysubtags_v1;
|
||||||
|
#[macro_use]
|
||||||
|
#[path = "macros/fallback_parents_v1.data.rs"]
|
||||||
|
mod fallback_parents_v1;
|
||||||
|
#[doc(inline)]
|
||||||
|
pub use __impl_fallback_parents_v1 as impl_fallback_parents_v1;
|
||||||
|
#[macro_use]
|
||||||
|
#[path = "macros/fallback_supplement_co_v1.data.rs"]
|
||||||
|
mod fallback_supplement_co_v1;
|
||||||
|
#[doc(inline)]
|
||||||
|
pub use __impl_fallback_supplement_co_v1 as impl_fallback_supplement_co_v1;
|
||||||
|
#[macro_use]
|
||||||
|
#[path = "macros/list_and_v1.data.rs"]
|
||||||
|
mod list_and_v1;
|
||||||
|
#[doc(inline)]
|
||||||
|
pub use __impl_list_and_v1 as impl_list_and_v1;
@ -0,0 +1,40 @@
// @generated
|
||||||
|
/// Implement `DataProvider<LocaleFallbackLikelySubtagsV1Marker>` on the given struct using the data
|
||||||
|
/// hardcoded in this file. This allows the struct to be used with
|
||||||
|
/// `icu`'s `_unstable` constructors.
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! __impl_fallback_likelysubtags_v1 {
|
||||||
|
($ provider : ty) => {
|
||||||
|
#[clippy::msrv = "1.66"]
|
||||||
|
const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO;
|
||||||
|
#[clippy::msrv = "1.66"]
|
||||||
|
impl $provider {
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub const SINGLETON_FALLBACK_LIKELYSUBTAGS_V1: &'static <icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker as icu_provider::DataMarker>::Yokeable = &icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1 {
|
||||||
|
l2s: unsafe {
|
||||||
|
#[allow(unused_unsafe)]
|
||||||
|
zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"am\0ar\0as\0be\0bg\0bgcbhobn\0brxchrcv\0doiel\0fa\0gu\0he\0hi\0hy\0ja\0ka\0kk\0km\0kn\0ko\0kokks\0ky\0lo\0maimk\0ml\0mn\0mnimr\0my\0ne\0or\0pa\0ps\0rajru\0sa\0satsd\0si\0sr\0ta\0te\0tg\0th\0ti\0tt\0uk\0ur\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"EthiArabBengCyrlCyrlDevaDevaBengDevaCherCyrlDevaGrekArabGujrHebrDevaArmnJpanGeorCyrlKhmrKndaKoreDevaArabCyrlLaooDevaCyrlMlymCyrlBengDevaMymrDevaOryaGuruArabDevaCyrlDevaOlckArabSinhCyrlTamlTeluCyrlThaiEthiCyrlCyrlArabHantHans") })
|
||||||
|
},
|
||||||
|
lr2s: unsafe {
|
||||||
|
#[allow(unused_unsafe)]
|
||||||
|
zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"az\0ha\0kk\0ky\0mn\0ms\0pa\0sd\0sr\0tg\0uz\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x03\0\0\0\x05\0\0\0\t\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x0F\0\0\0\x13\0\0\0\x14\0\0\0\x16\0\0\0\x17\0\0\0&\0\0\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"IQ\0IR\0RU\0CM\0SD\0AF\0CN\0IR\0MN\0CN\0TR\0CN\0CC\0PK\0IN\0ME\0RO\0RU\0TR\0PK\0AF\0CN\0CN\0AU\0BN\0GB\0GF\0HK\0ID\0MO\0PA\0PF\0PH\0SR\0TH\0TW\0US\0VN\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ArabArabCyrlArabArabArabArabArabArabArabLatnMongArabArabDevaLatnLatnLatnLatnArabArabCyrlHansHantHantHantHantHantHantHantHantHantHantHantHantHantHantHant") })
|
||||||
|
},
|
||||||
|
l2r: unsafe {
|
||||||
|
#[allow(unused_unsafe)]
|
||||||
|
zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"af\0am\0ar\0as\0astaz\0be\0bg\0bgcbhobn\0br\0brxbs\0ca\0cebchrcs\0cv\0cy\0da\0de\0doidsbel\0en\0es\0et\0eu\0fa\0ff\0fi\0filfo\0fr\0ga\0gd\0gl\0gu\0ha\0he\0hi\0hr\0hsbhu\0hy\0ia\0id\0ig\0is\0it\0ja\0jv\0ka\0keakgpkk\0km\0kn\0ko\0kokks\0ky\0lo\0lt\0lv\0maimi\0mk\0ml\0mn\0mnimr\0ms\0my\0ne\0nl\0nn\0no\0or\0pa\0pcmpl\0ps\0pt\0qu\0rajrm\0ro\0ru\0sa\0satsc\0sd\0si\0sk\0sl\0so\0sq\0sr\0su\0sv\0sw\0ta\0te\0tg\0th\0ti\0tk\0to\0tr\0tt\0uk\0ur\0uz\0vi\0wo\0xh\0yo\0yrlyuezh\0zu\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ZA\0ET\0EG\0IN\0ES\0AZ\0BY\0BG\0IN\0IN\0BD\0FR\0IN\0BA\0ES\0PH\0US\0CZ\0RU\0GB\0DK\0DE\0IN\0DE\0GR\0US\0ES\0EE\0ES\0IR\0SN\0FI\0PH\0FO\0FR\0IE\0GB\0ES\0IN\0NG\0IL\0IN\0HR\0DE\0HU\0AM\x00001ID\0NG\0IS\0IT\0JP\0ID\0GE\0CV\0BR\0KZ\0KH\0IN\0KR\0IN\0IN\0KG\0LA\0LT\0LV\0IN\0NZ\0MK\0IN\0MN\0IN\0IN\0MY\0MM\0NP\0NL\0NO\0NO\0IN\0IN\0NG\0PL\0AF\0BR\0PE\0IN\0CH\0RO\0RU\0IN\0IN\0IT\0PK\0LK\0SK\0SI\0SO\0AL\0RS\0ID\0SE\0TZ\0IN\0IN\0TJ\0TH\0ET\0TM\0TO\0TR\0RU\0UA\0PK\0UZ\0VN\0SN\0ZA\0NG\0BR\0HK\0CN\0ZA\0") })
|
||||||
|
},
|
||||||
|
ls2r: unsafe {
|
||||||
|
#[allow(unused_unsafe)]
|
||||||
|
zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"az\0en\0ff\0kk\0ky\0mn\0pa\0sd\0tg\0uz\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x01\0\0\0\x02\0\0\0\x03\0\0\0\x04\0\0\0\x06\0\0\0\x07\0\0\0\x08\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x11\0\0\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ArabShawAdlmArabArabLatnMongArabDevaKhojSindArabArabHansBopoHanbHant") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"IR\0GB\0GN\0CN\0CN\0TR\0CN\0PK\0IN\0IN\0IN\0PK\0AF\0CN\0TW\0TW\0TW\0") })
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
#[clippy::msrv = "1.66"]
|
||||||
|
impl icu_provider::DataProvider<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker> for $provider {
|
||||||
|
fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker>, icu_provider::DataError> {
|
||||||
|
if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_LIKELYSUBTAGS_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
@ -0,0 +1,28 @@
// @generated
|
||||||
|
/// Implement `DataProvider<LocaleFallbackParentsV1Marker>` on the given struct using the data
|
||||||
|
/// hardcoded in this file. This allows the struct to be used with
|
||||||
|
/// `icu`'s `_unstable` constructors.
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! __impl_fallback_parents_v1 {
|
||||||
|
($ provider : ty) => {
|
||||||
|
#[clippy::msrv = "1.66"]
|
||||||
|
const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO;
|
||||||
|
#[clippy::msrv = "1.66"]
|
||||||
|
impl $provider {
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub const SINGLETON_FALLBACK_PARENTS_V1: &'static <icu_locid_transform::provider::LocaleFallbackParentsV1Marker as icu_provider::DataMarker>::Yokeable = &icu_locid_transform::provider::LocaleFallbackParentsV1 {
|
||||||
|
parents: unsafe {
|
||||||
|
#[allow(unused_unsafe)]
|
||||||
|
zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x84\0\0\0\0\0\x06\0\x0B\0\x10\0\x15\0\x1A\0\x1F\0$\0)\0.\x003\08\0=\0B\0G\0L\0Q\0V\0[\0`\0e\0j\0o\0t\0y\0~\0\x83\0\x88\0\x8D\0\x92\0\x97\0\x9C\0\xA1\0\xA6\0\xAB\0\xB0\0\xB5\0\xBA\0\xBF\0\xC4\0\xC9\0\xCE\0\xD3\0\xD8\0\xDD\0\xE2\0\xE7\0\xEC\0\xF1\0\xF6\0\xFB\0\0\x01\x05\x01\n\x01\x0F\x01\x14\x01\x19\x01\x1E\x01#\x01(\x01-\x012\x017\x01<\x01A\x01F\x01K\x01P\x01U\x01Z\x01_\x01d\x01i\x01n\x01s\x01x\x01}\x01\x82\x01\x87\x01\x8C\x01\x91\x01\x96\x01\x9B\x01\xA0\x01\xA5\x01\xAA\x01\xAF\x01\xB4\x01\xB9\x01\xBE\x01\xC3\x01\xC8\x01\xCD\x01\xD2\x01\xD7\x01\xDC\x01\xE1\x01\xE6\x01\xEB\x01\xF0\x01\xF5\x01\xFA\x01\xFF\x01\x04\x02\t\x02\x0E\x02\x13\x02\x18\x02\x1D\x02\"\x02'\x02,\x021\x026\x02;\x02@\x02G\x02I\x02K\x02M\x02R\x02W\x02\\\x02a\x02f\x02k\x02p\x02u\x02z\x02\x7F\x02\x84\x02\x89\x02en-150en-AGen-AIen-ATen-AUen-BBen-BEen-BMen-BSen-BWen-BZen-CCen-CHen-CKen-CMen-CXen-CYen-DEen-DGen-DKen-DMen-ERen-FIen-FJen-FKen-FMen-GBen-GDen-GGen-GHen-GIen-GMen-GYen-HKen-IEen-ILen-IMen-INen-IOen-JEen-JMen-KEen-KIen-KNen-KYen-LCen-LRen-LSen-MGen-MOen-MSen-MTen-MUen-MVen-MWen-MYen-NAen-NFen-NGen-NLen-NRen-NUen-NZen-PGen-PKen-PNen-PWen-RWen-SBen-SCen-SDen-SEen-SGen-SHen-SIen-SLen-SSen-SXen-SZen-TCen-TKen-TOen-TTen-TVen-TZen-UGen-VCen-VGen-VUen-WSen-ZAen-ZMen-ZWes-ARes-BOes-BRes-BZes-CLes-COes-CRes-CUes-DOes-ECes-GTes-HNes-MXes-NIes-PAes-PEes-PRes-PYes-SVes-USes-UYes-VEhi-Latnhtnbnnno-NOpt-AOpt-CHpt-CVpt-FRpt-GQpt-GWpt-LUpt-MOpt-MZpt-STpt-TLzh-Hant-MO") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001es\0\0\0\
0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419en\0\0\0\0\0\0\x01IN\0fr\0\0\0\0\0\0\x01HT\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0zh\0\x01Hant\x01HK\0") })
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
#[clippy::msrv = "1.66"]
|
||||||
|
impl icu_provider::DataProvider<icu_locid_transform::provider::LocaleFallbackParentsV1Marker> for $provider {
|
||||||
|
fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_locid_transform::provider::LocaleFallbackParentsV1Marker>, icu_provider::DataError> {
|
||||||
|
if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_PARENTS_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(<icu_locid_transform::provider::LocaleFallbackParentsV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
@ -0,0 +1,32 @@
// @generated
|
||||||
|
/// Implement `DataProvider<CollationFallbackSupplementV1Marker>` on the given struct using the data
|
||||||
|
/// hardcoded in this file. This allows the struct to be used with
|
||||||
|
/// `icu`'s `_unstable` constructors.
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! __impl_fallback_supplement_co_v1 {
|
||||||
|
($ provider : ty) => {
|
||||||
|
#[clippy::msrv = "1.66"]
|
||||||
|
const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO;
|
||||||
|
#[clippy::msrv = "1.66"]
|
||||||
|
impl $provider {
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub const SINGLETON_FALLBACK_SUPPLEMENT_CO_V1: &'static <icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::DataMarker>::Yokeable = &icu_locid_transform::provider::LocaleFallbackSupplementV1 {
|
||||||
|
parents: unsafe {
|
||||||
|
#[allow(unused_unsafe)]
|
||||||
|
zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x01\0\0\0\0\0yue") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"zh\0\x01Hant\0\0\0\0") })
|
||||||
|
},
|
||||||
|
unicode_extension_defaults: unsafe {
|
||||||
|
#[allow(unused_unsafe)]
|
||||||
|
zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"co") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x02\0\0\0") }, unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x02\0zhzh-Hant") }, unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x06\0pinyinstroke") })
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
#[clippy::msrv = "1.66"]
|
||||||
|
impl icu_provider::DataProvider<icu_locid_transform::provider::CollationFallbackSupplementV1Marker> for $provider {
|
||||||
|
fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_locid_transform::provider::CollationFallbackSupplementV1Marker>, icu_provider::DataError> {
|
||||||
|
if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_SUPPLEMENT_CO_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(<icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
File diff suppressed because one or more lines are too long
@ -1,122 +1,31 @@
// @generated
|
// @generated
|
||||||
#[clippy::msrv = "1.61"]
|
include!("macros.rs");
|
||||||
mod fallback;
|
|
||||||
#[clippy::msrv = "1.61"]
|
|
||||||
mod list;
|
|
||||||
#[clippy::msrv = "1.61"]
|
|
||||||
use icu_provider::prelude::*;
|
|
||||||
/// Implement [`DataProvider<M>`] on the given struct using the data
|
|
||||||
/// hardcoded in this module. This allows the struct to be used with
|
|
||||||
/// `icu`'s `_unstable` constructors.
|
|
||||||
///
|
|
||||||
/// This macro can only be called from its definition-site, i.e. right
|
|
||||||
/// after `include!`-ing the generated module.
|
|
||||||
///
|
|
||||||
/// ```compile_fail
|
|
||||||
/// struct MyDataProvider;
|
|
||||||
/// include!("/path/to/generated/mod.rs");
|
|
||||||
/// impl_data_provider(MyDataProvider);
|
|
||||||
/// ```
|
|
||||||
#[allow(unused_macros)]
|
|
||||||
macro_rules! impl_data_provider {
|
macro_rules! impl_data_provider {
|
||||||
($ provider : path) => {
|
($ provider : ty) => {
|
||||||
#[clippy::msrv = "1.61"]
|
make_provider!($provider);
|
||||||
impl DataProvider<::icu_list::provider::AndListV1Marker> for $provider {
|
impl_fallback_likelysubtags_v1!($provider);
|
||||||
fn load(&self, req: DataRequest) -> Result<DataResponse<::icu_list::provider::AndListV1Marker>, DataError> {
|
impl_fallback_parents_v1!($provider);
|
||||||
list::and_v1::lookup(&req.locale)
|
impl_fallback_supplement_co_v1!($provider);
|
||||||
.map(zerofrom::ZeroFrom::zero_from)
|
impl_list_and_v1!($provider);
|
||||||
.map(DataPayload::from_owned)
|
|
||||||
.map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) })
|
|
||||||
.ok_or_else(|| DataErrorKind::MissingLocale.with_req(::icu_list::provider::AndListV1Marker::KEY, req))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[clippy::msrv = "1.61"]
|
|
||||||
impl DataProvider<::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker> for $provider {
|
|
||||||
fn load(
|
|
||||||
&self,
|
|
||||||
req: DataRequest,
|
|
||||||
) -> Result<DataResponse<::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker>, DataError> {
|
|
||||||
fallback::supplement::co_v1::lookup(&req.locale)
|
|
||||||
.map(zerofrom::ZeroFrom::zero_from)
|
|
||||||
.map(DataPayload::from_owned)
|
|
||||||
.map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) })
|
|
||||||
.ok_or_else(|| {
|
|
||||||
DataErrorKind::MissingLocale
|
|
||||||
.with_req(::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker::KEY, req)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[clippy::msrv = "1.61"]
|
|
||||||
impl DataProvider<::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker> for $provider {
|
|
||||||
fn load(
|
|
||||||
&self,
|
|
||||||
req: DataRequest,
|
|
||||||
) -> Result<DataResponse<::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker>, DataError> {
|
|
||||||
fallback::likelysubtags_v1::lookup(&req.locale)
|
|
||||||
.map(zerofrom::ZeroFrom::zero_from)
|
|
||||||
.map(DataPayload::from_owned)
|
|
||||||
.map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) })
|
|
||||||
.ok_or_else(|| {
|
|
||||||
DataErrorKind::MissingLocale
|
|
||||||
.with_req(::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker::KEY, req)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[clippy::msrv = "1.61"]
|
|
||||||
impl DataProvider<::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker> for $provider {
|
|
||||||
fn load(
|
|
||||||
&self,
|
|
||||||
req: DataRequest,
|
|
||||||
) -> Result<DataResponse<::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker>, DataError> {
|
|
||||||
fallback::parents_v1::lookup(&req.locale)
|
|
||||||
.map(zerofrom::ZeroFrom::zero_from)
|
|
||||||
.map(DataPayload::from_owned)
|
|
||||||
.map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) })
|
|
||||||
.ok_or_else(|| {
|
|
||||||
DataErrorKind::MissingLocale.with_req(::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker::KEY, req)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
/// Implement [`AnyProvider`] on the given struct using the data
|
|
||||||
/// hardcoded in this module. This allows the struct to be used with
|
|
||||||
/// `icu`'s `_any` constructors.
|
|
||||||
///
|
|
||||||
/// This macro can only be called from its definition-site, i.e. right
|
|
||||||
/// after `include!`-ing the generated module.
|
|
||||||
///
|
|
||||||
/// ```compile_fail
|
|
||||||
/// struct MyAnyProvider;
|
|
||||||
/// include!("/path/to/generated/mod.rs");
|
|
||||||
/// impl_any_provider(MyAnyProvider);
|
|
||||||
/// ```
|
|
||||||
#[allow(unused_macros)]
|
#[allow(unused_macros)]
|
||||||
macro_rules! impl_any_provider {
|
macro_rules! impl_any_provider {
|
||||||
($ provider : path) => {
|
($ provider : ty) => {
|
||||||
#[clippy::msrv = "1.61"]
|
#[clippy::msrv = "1.66"]
|
||||||
impl AnyProvider for $provider {
|
impl icu_provider::AnyProvider for $provider {
|
||||||
fn load_any(&self, key: DataKey, req: DataRequest) -> Result<AnyResponse, DataError> {
|
fn load_any(&self, key: icu_provider::DataKey, req: icu_provider::DataRequest) -> Result<icu_provider::AnyResponse, icu_provider::DataError> {
|
||||||
const ANDLISTV1MARKER: ::icu_provider::DataKeyHash = ::icu_list::provider::AndListV1Marker::KEY.hashed();
|
|
||||||
const COLLATIONFALLBACKSUPPLEMENTV1MARKER: ::icu_provider::DataKeyHash =
|
|
||||||
::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker::KEY.hashed();
|
|
||||||
const LOCALEFALLBACKLIKELYSUBTAGSV1MARKER: ::icu_provider::DataKeyHash =
|
|
||||||
::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker::KEY.hashed();
|
|
||||||
const LOCALEFALLBACKPARENTSV1MARKER: ::icu_provider::DataKeyHash =
|
|
||||||
::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker::KEY.hashed();
|
|
||||||
match key.hashed() {
|
match key.hashed() {
|
||||||
ANDLISTV1MARKER => list::and_v1::lookup(&req.locale).map(AnyPayload::from_static_ref),
|
h if h == <icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response),
|
||||||
COLLATIONFALLBACKSUPPLEMENTV1MARKER => fallback::supplement::co_v1::lookup(&req.locale).map(AnyPayload::from_static_ref),
|
h if h == <icu_locid_transform::provider::LocaleFallbackParentsV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_locid_transform::provider::LocaleFallbackParentsV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response),
|
||||||
LOCALEFALLBACKLIKELYSUBTAGSV1MARKER => fallback::likelysubtags_v1::lookup(&req.locale).map(AnyPayload::from_static_ref),
|
h if h == <icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_locid_transform::provider::CollationFallbackSupplementV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response),
|
||||||
LOCALEFALLBACKPARENTSV1MARKER => fallback::parents_v1::lookup(&req.locale).map(AnyPayload::from_static_ref),
|
h if h == <icu_list::provider::AndListV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_list::provider::AndListV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response),
|
||||||
_ => return Err(DataErrorKind::MissingDataKey.with_req(key, req)),
|
_ => Err(icu_provider::DataErrorKind::MissingDataKey.with_req(key, req)),
|
||||||
}
|
}
|
||||||
.map(|payload| AnyResponse { payload: Some(payload), metadata: Default::default() })
|
|
||||||
.ok_or_else(|| DataErrorKind::MissingLocale.with_req(key, req))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
#[clippy::msrv = "1.61"]
|
#[clippy::msrv = "1.66"]
|
||||||
pub struct BakedDataProvider;
|
pub struct BakedDataProvider;
|
||||||
impl_data_provider!(BakedDataProvider);
|
impl_data_provider!(BakedDataProvider);
@ -19,6 +19,10 @@
//! -k list/and@1 fallback/likelysubtags@1 fallback/parents@1 fallback/supplement/co@1 \
|
//! -k list/and@1 fallback/likelysubtags@1 fallback/parents@1 fallback/supplement/co@1 \
|
||||||
//! --cldr-tag latest --icuexport-tag latest -o src/data
|
//! --cldr-tag latest --icuexport-tag latest -o src/data
|
||||||
//! ```
|
//! ```
|
||||||
|
|
||||||
|
#![cfg_attr(not(bootstrap), allow(internal_features))]
|
||||||
|
#![cfg_attr(not(bootstrap), feature(rustdoc_internals))]
|
||||||
|
#![cfg_attr(not(bootstrap), doc(rust_logo))]
|
||||||
#![allow(elided_lifetimes_in_paths)]
|
#![allow(elided_lifetimes_in_paths)]
|
||||||
|
|
||||||
mod data {
|
mod data {
@ -3,19 +3,16 @@ name = "rustc_borrowck"
version = "0.0.0"
|
version = "0.0.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
[lib]
|
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
# tidy-alphabetical-start
|
||||||
either = "1.5.0"
|
either = "1.5.0"
|
||||||
itertools = "0.10.1"
|
itertools = "0.10.1"
|
||||||
tracing = "0.1"
|
|
||||||
polonius-engine = "0.13.0"
|
polonius-engine = "0.13.0"
|
||||||
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
|
|
||||||
rustc_data_structures = { path = "../rustc_data_structures" }
|
rustc_data_structures = { path = "../rustc_data_structures" }
|
||||||
rustc_errors = { path = "../rustc_errors" }
|
rustc_errors = { path = "../rustc_errors" }
|
||||||
|
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
|
||||||
rustc_graphviz = { path = "../rustc_graphviz" }
|
rustc_graphviz = { path = "../rustc_graphviz" }
|
||||||
rustc_hir = { path = "../rustc_hir" }
|
rustc_hir = { path = "../rustc_hir" }
|
||||||
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
|
|
||||||
rustc_index = { path = "../rustc_index" }
|
rustc_index = { path = "../rustc_index" }
|
||||||
rustc_infer = { path = "../rustc_infer" }
|
rustc_infer = { path = "../rustc_infer" }
|
||||||
rustc_lexer = { path = "../rustc_lexer" }
|
rustc_lexer = { path = "../rustc_lexer" }
@ -24,7 +21,10 @@ rustc_middle = { path = "../rustc_middle" }
rustc_mir_dataflow = { path = "../rustc_mir_dataflow" }
|
rustc_mir_dataflow = { path = "../rustc_mir_dataflow" }
|
||||||
rustc_serialize = { path = "../rustc_serialize" }
|
rustc_serialize = { path = "../rustc_serialize" }
|
||||||
rustc_session = { path = "../rustc_session" }
|
rustc_session = { path = "../rustc_session" }
|
||||||
|
rustc_span = { path = "../rustc_span" }
|
||||||
rustc_target = { path = "../rustc_target" }
|
rustc_target = { path = "../rustc_target" }
|
||||||
rustc_trait_selection = { path = "../rustc_trait_selection" }
|
rustc_trait_selection = { path = "../rustc_trait_selection" }
|
||||||
rustc_traits = { path = "../rustc_traits" }
|
rustc_traits = { path = "../rustc_traits" }
|
||||||
rustc_span = { path = "../rustc_span" }
|
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
|
||||||
|
tracing = "0.1"
|
||||||
|
# tidy-alphabetical-end
@ -1,20 +1,20 @@
borrowck_assign_due_to_use_closure =
|
borrowck_assign_due_to_use_closure =
|
||||||
assignment occurs due to use in closure
|
assignment occurs due to use in closure
|
||||||
|
|
||||||
borrowck_assign_due_to_use_generator =
|
borrowck_assign_due_to_use_coroutine =
|
||||||
assign occurs due to use in generator
|
assign occurs due to use in coroutine
|
||||||
|
|
||||||
borrowck_assign_part_due_to_use_closure =
|
borrowck_assign_part_due_to_use_closure =
|
||||||
assignment to part occurs due to use in closure
|
assignment to part occurs due to use in closure
|
||||||
|
|
||||||
borrowck_assign_part_due_to_use_generator =
|
borrowck_assign_part_due_to_use_coroutine =
|
||||||
assign to part occurs due to use in generator
|
assign to part occurs due to use in coroutine
|
||||||
|
|
||||||
borrowck_borrow_due_to_use_closure =
|
borrowck_borrow_due_to_use_closure =
|
||||||
borrow occurs due to use in closure
|
borrow occurs due to use in closure
|
||||||
|
|
||||||
borrowck_borrow_due_to_use_generator =
|
borrowck_borrow_due_to_use_coroutine =
|
||||||
borrow occurs due to use in generator
|
borrow occurs due to use in coroutine
|
||||||
|
|
||||||
borrowck_calling_operator_moves_lhs =
|
borrowck_calling_operator_moves_lhs =
|
||||||
calling this operator moves the left-hand side
|
calling this operator moves the left-hand side
@ -142,11 +142,11 @@ borrowck_partial_var_move_by_use_in_closure =
*[false] moved
|
*[false] moved
|
||||||
} due to use in closure
|
} due to use in closure
|
||||||
|
|
||||||
borrowck_partial_var_move_by_use_in_generator =
|
borrowck_partial_var_move_by_use_in_coroutine =
|
||||||
variable {$is_partial ->
|
variable {$is_partial ->
|
||||||
[true] partially moved
|
[true] partially moved
|
||||||
*[false] moved
|
*[false] moved
|
||||||
} due to use in generator
|
} due to use in coroutine
|
||||||
|
|
||||||
borrowck_returned_async_block_escaped =
|
borrowck_returned_async_block_escaped =
|
||||||
returns an `async` block that contains a reference to a captured variable, which then escapes the closure body
|
returns an `async` block that contains a reference to a captured variable, which then escapes the closure body
@ -180,15 +180,15 @@ borrowck_ty_no_impl_copy =
borrowck_use_due_to_use_closure =
|
borrowck_use_due_to_use_closure =
|
||||||
use occurs due to use in closure
|
use occurs due to use in closure
|
||||||
|
|
||||||
borrowck_use_due_to_use_generator =
|
borrowck_use_due_to_use_coroutine =
|
||||||
use occurs due to use in generator
|
use occurs due to use in coroutine
|
||||||
|
|
||||||
borrowck_used_impl_require_static =
|
borrowck_used_impl_require_static =
|
||||||
the used `impl` has a `'static` requirement
|
the used `impl` has a `'static` requirement
|
||||||
|
|
||||||
borrowck_value_capture_here =
|
borrowck_value_capture_here =
|
||||||
value captured {$is_within ->
|
value captured {$is_within ->
|
||||||
[true] here by generator
|
[true] here by coroutine
|
||||||
*[false] here
|
*[false] here
|
||||||
}
|
}
@ -207,8 +207,8 @@ borrowck_value_moved_here =
borrowck_var_borrow_by_use_in_closure =
|
borrowck_var_borrow_by_use_in_closure =
|
||||||
borrow occurs due to use in closure
|
borrow occurs due to use in closure
|
||||||
|
|
||||||
borrowck_var_borrow_by_use_in_generator =
|
borrowck_var_borrow_by_use_in_coroutine =
|
||||||
borrow occurs due to use in generator
|
borrow occurs due to use in coroutine
|
||||||
|
|
||||||
borrowck_var_borrow_by_use_place_in_closure =
|
borrowck_var_borrow_by_use_place_in_closure =
|
||||||
{$is_single_var ->
|
{$is_single_var ->
@ -216,11 +216,11 @@ borrowck_var_borrow_by_use_place_in_closure =
[false] borrows occur
|
[false] borrows occur
|
||||||
} due to use of {$place} in closure
|
} due to use of {$place} in closure
|
||||||
|
|
||||||
borrowck_var_borrow_by_use_place_in_generator =
|
borrowck_var_borrow_by_use_place_in_coroutine =
|
||||||
{$is_single_var ->
|
{$is_single_var ->
|
||||||
*[true] borrow occurs
|
*[true] borrow occurs
|
||||||
[false] borrows occur
|
[false] borrows occur
|
||||||
} due to use of {$place} in generator
|
} due to use of {$place} in coroutine
|
||||||
|
|
||||||
borrowck_var_cannot_escape_closure =
|
borrowck_var_cannot_escape_closure =
|
||||||
captured variable cannot escape `FnMut` closure body
|
captured variable cannot escape `FnMut` closure body
@ -234,8 +234,8 @@ borrowck_var_does_not_need_mut =
borrowck_var_first_borrow_by_use_place_in_closure =
|
borrowck_var_first_borrow_by_use_place_in_closure =
|
||||||
first borrow occurs due to use of {$place} in closure
|
first borrow occurs due to use of {$place} in closure
|
||||||
|
|
||||||
borrowck_var_first_borrow_by_use_place_in_generator =
|
borrowck_var_first_borrow_by_use_place_in_coroutine =
|
||||||
first borrow occurs due to use of {$place} in generator
|
first borrow occurs due to use of {$place} in coroutine
|
||||||
|
|
||||||
borrowck_var_here_captured = variable captured here
|
borrowck_var_here_captured = variable captured here
|
@ -244,8 +244,8 @@ borrowck_var_here_defined = variable defined here
|
borrowck_var_move_by_use_in_closure =
|
||||||
move occurs due to use in closure
|
move occurs due to use in closure
|
||||||
|
|
||||||
borrowck_var_move_by_use_in_generator =
|
borrowck_var_move_by_use_in_coroutine =
|
||||||
move occurs due to use in generator
|
move occurs due to use in coroutine
|
||||||
|
|
||||||
borrowck_var_mutable_borrow_by_use_place_in_closure =
|
borrowck_var_mutable_borrow_by_use_place_in_closure =
|
||||||
mutable borrow occurs due to use of {$place} in closure
|
mutable borrow occurs due to use of {$place} in closure
@ -253,5 +253,5 @@ borrowck_var_mutable_borrow_by_use_place_in_closure =
borrowck_var_second_borrow_by_use_place_in_closure =
|
borrowck_var_second_borrow_by_use_place_in_closure =
|
||||||
second borrow occurs due to use of {$place} in closure
|
second borrow occurs due to use of {$place} in closure
|
||||||
|
|
||||||
borrowck_var_second_borrow_by_use_place_in_generator =
|
borrowck_var_second_borrow_by_use_place_in_coroutine =
|
||||||
second borrow occurs due to use of {$place} in generator
|
second borrow occurs due to use of {$place} in coroutine
|
@ -368,16 +368,17 @@ impl<'cx, 'tcx> crate::MirBorrowckCtxt<'cx, 'tcx> {
|
err
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn cannot_borrow_across_generator_yield(
|
pub(crate) fn cannot_borrow_across_coroutine_yield(
|
||||||
&self,
|
&self,
|
||||||
span: Span,
|
span: Span,
|
||||||
yield_span: Span,
|
yield_span: Span,
|
||||||
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
|
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
|
||||||
|
let coroutine_kind = self.body.coroutine.as_ref().unwrap().coroutine_kind;
|
||||||
let mut err = struct_span_err!(
|
let mut err = struct_span_err!(
|
||||||
self,
|
self,
|
||||||
span,
|
span,
|
||||||
E0626,
|
E0626,
|
||||||
"borrow may still be in use when generator yields",
|
"borrow may still be in use when {coroutine_kind:#} yields",
|
||||||
);
|
);
|
||||||
err.span_label(yield_span, "possible yield occurs here");
|
err.span_label(yield_span, "possible yield occurs here");
|
||||||
err
|
err
@ -1,6 +1,7 @@
#![deny(rustc::untranslatable_diagnostic)]
|
#![deny(rustc::untranslatable_diagnostic)]
|
||||||
#![deny(rustc::diagnostic_outside_of_impl)]
|
#![deny(rustc::diagnostic_outside_of_impl)]
|
||||||
use rustc_data_structures::fx::FxIndexMap;
|
use rustc_data_structures::fx::FxIndexMap;
|
||||||
|
use rustc_data_structures::graph::WithSuccessors;
|
||||||
use rustc_index::bit_set::BitSet;
|
use rustc_index::bit_set::BitSet;
|
||||||
use rustc_middle::mir::{
|
use rustc_middle::mir::{
|
||||||
self, BasicBlock, Body, CallReturnPlaces, Location, Place, TerminatorEdges,
|
self, BasicBlock, Body, CallReturnPlaces, Location, Place, TerminatorEdges,
@ -222,6 +223,7 @@ impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> {
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// This is `pub` because it's used by unstable external borrowck data users, see `consumers.rs`.
|
||||||
pub fn calculate_borrows_out_of_scope_at_location<'tcx>(
|
pub fn calculate_borrows_out_of_scope_at_location<'tcx>(
|
||||||
body: &Body<'tcx>,
|
body: &Body<'tcx>,
|
||||||
regioncx: &RegionInferenceContext<'tcx>,
|
regioncx: &RegionInferenceContext<'tcx>,
@ -238,15 +240,203 @@ pub fn calculate_borrows_out_of_scope_at_location<'tcx>(
prec.borrows_out_of_scope_at_location
|
prec.borrows_out_of_scope_at_location
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct PoloniusOutOfScopePrecomputer<'a, 'tcx> {
|
||||||
|
visited: BitSet<mir::BasicBlock>,
|
||||||
|
visit_stack: Vec<mir::BasicBlock>,
|
||||||
|
body: &'a Body<'tcx>,
|
||||||
|
regioncx: &'a RegionInferenceContext<'tcx>,
|
||||||
|
|
||||||
|
loans_out_of_scope_at_location: FxIndexMap<Location, Vec<BorrowIndex>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, 'tcx> PoloniusOutOfScopePrecomputer<'a, 'tcx> {
|
||||||
|
fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self {
|
||||||
|
Self {
|
||||||
|
visited: BitSet::new_empty(body.basic_blocks.len()),
|
||||||
|
visit_stack: vec![],
|
||||||
|
body,
|
||||||
|
regioncx,
|
||||||
|
loans_out_of_scope_at_location: FxIndexMap::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> {
|
||||||
|
/// Loans are in scope while they are live: whether they are contained within any live region.
|
||||||
|
/// In the location-insensitive analysis, a loan will be contained in a region if the issuing
|
||||||
|
/// region can reach it in the subset graph. So this is a reachability problem.
|
||||||
|
fn precompute_loans_out_of_scope(
|
||||||
|
&mut self,
|
||||||
|
loan_idx: BorrowIndex,
|
||||||
|
issuing_region: RegionVid,
|
||||||
|
loan_issued_at: Location,
|
||||||
|
) {
|
||||||
|
let sccs = self.regioncx.constraint_sccs();
|
||||||
|
let universal_regions = self.regioncx.universal_regions();
|
||||||
|
|
||||||
|
// We first handle the cases where the loan doesn't go out of scope, depending on the issuing
|
||||||
|
// region's successors.
|
||||||
|
for successor in self.regioncx.region_graph().depth_first_search(issuing_region) {
|
||||||
|
// 1. Via applied member constraints
|
||||||
|
//
|
||||||
|
// The issuing region can flow into the choice regions, and they are either:
|
||||||
|
// - placeholders or free regions themselves,
|
||||||
|
// - or also transitively outlive a free region.
|
||||||
|
//
|
||||||
|
// That is to say, if there are applied member constraints here, the loan escapes the
|
||||||
|
// function and cannot go out of scope. We could early return here.
|
||||||
|
//
|
||||||
|
// For additional insurance via fuzzing and crater, we verify that the constraint's min
|
||||||
|
// choice indeed escapes the function. In the future, we could e.g. turn this check into
|
||||||
|
// a debug assert and early return as an optimization.
|
||||||
|
let scc = sccs.scc(successor);
|
||||||
|
for constraint in self.regioncx.applied_member_constraints(scc) {
|
||||||
|
if universal_regions.is_universal_region(constraint.min_choice) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Via regions that are live at all points: placeholders and free regions.
|
||||||
|
//
|
||||||
|
// If the issuing region outlives such a region, its loan escapes the function and
|
||||||
|
// cannot go out of scope. We can early return.
|
||||||
|
if self.regioncx.is_region_live_at_all_points(successor) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let first_block = loan_issued_at.block;
|
||||||
|
let first_bb_data = &self.body.basic_blocks[first_block];
|
||||||
|
|
||||||
|
// The first block we visit is the one where the loan is issued, starting from the statement
|
||||||
|
// where the loan is issued: at `loan_issued_at`.
|
||||||
|
let first_lo = loan_issued_at.statement_index;
|
||||||
|
let first_hi = first_bb_data.statements.len();
|
||||||
|
|
||||||
|
if let Some(kill_location) =
|
||||||
|
self.loan_kill_location(loan_idx, loan_issued_at, first_block, first_lo, first_hi)
|
||||||
|
{
|
||||||
|
debug!("loan {:?} gets killed at {:?}", loan_idx, kill_location);
|
||||||
|
self.loans_out_of_scope_at_location.entry(kill_location).or_default().push(loan_idx);
|
||||||
|
|
||||||
|
// The loan dies within the first block, we're done and can early return.
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// The loan is not dead. Add successor BBs to the work list, if necessary.
|
||||||
|
for succ_bb in first_bb_data.terminator().successors() {
|
||||||
|
if self.visited.insert(succ_bb) {
|
||||||
|
self.visit_stack.push(succ_bb);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// We may end up visiting `first_block` again. This is not an issue: we know at this point
|
||||||
|
// that the loan is not killed in the `first_lo..=first_hi` range, so checking the
|
||||||
|
// `0..first_lo` range and the `0..first_hi` range gives the same result.
|
||||||
|
while let Some(block) = self.visit_stack.pop() {
|
||||||
|
let bb_data = &self.body[block];
|
||||||
|
let num_stmts = bb_data.statements.len();
|
||||||
|
if let Some(kill_location) =
|
||||||
|
self.loan_kill_location(loan_idx, loan_issued_at, block, 0, num_stmts)
|
||||||
|
{
|
||||||
|
debug!("loan {:?} gets killed at {:?}", loan_idx, kill_location);
|
||||||
|
self.loans_out_of_scope_at_location
|
||||||
|
.entry(kill_location)
|
||||||
|
.or_default()
|
||||||
|
.push(loan_idx);
|
||||||
|
|
||||||
|
// The loan dies within this block, so we don't need to visit its successors.
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add successor BBs to the work list, if necessary.
|
||||||
|
for succ_bb in bb_data.terminator().successors() {
|
||||||
|
if self.visited.insert(succ_bb) {
|
||||||
|
self.visit_stack.push(succ_bb);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.visited.clear();
|
||||||
|
assert!(self.visit_stack.is_empty(), "visit stack should be empty");
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the lowest statement in `start..=end`, where the loan goes out of scope, if any.
|
||||||
|
/// This is the statement where the issuing region can't reach any of the regions that are live
|
||||||
|
/// at this point.
|
||||||
|
fn loan_kill_location(
|
||||||
|
&self,
|
||||||
|
loan_idx: BorrowIndex,
|
||||||
|
loan_issued_at: Location,
|
||||||
|
block: BasicBlock,
|
||||||
|
start: usize,
|
||||||
|
end: usize,
|
||||||
|
) -> Option<Location> {
|
||||||
|
for statement_index in start..=end {
|
||||||
|
let location = Location { block, statement_index };
|
||||||
|
|
||||||
|
// Check whether the issuing region can reach local regions that are live at this point:
|
||||||
|
// - a loan is always live at its issuing location because it can reach the issuing
|
||||||
|
// region, which is always live at this location.
|
||||||
|
if location == loan_issued_at {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// - the loan goes out of scope at `location` if it's not contained within any regions
|
||||||
|
// live at this point.
|
||||||
|
//
|
||||||
|
// FIXME: if the issuing region `i` can reach a live region `r` at point `p`, and `r` is
|
||||||
|
// live at point `q`, then it's guaranteed that `i` would reach `r` at point `q`.
|
||||||
|
// Reachability is location-insensitive, and we could take advantage of that, by jumping
|
||||||
|
// to a further point than just the next statement: we can jump to the furthest point
|
||||||
|
// within the block where `r` is live.
|
||||||
|
if self.regioncx.is_loan_live_at(loan_idx, location) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// No live region is reachable from the issuing region: the loan is killed at this
|
||||||
|
// point.
|
||||||
|
return Some(location);
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx> Borrows<'a, 'tcx> {
|
impl<'a, 'tcx> Borrows<'a, 'tcx> {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
tcx: TyCtxt<'tcx>,
|
tcx: TyCtxt<'tcx>,
|
||||||
body: &'a Body<'tcx>,
|
body: &'a Body<'tcx>,
|
||||||
nonlexical_regioncx: &'a RegionInferenceContext<'tcx>,
|
regioncx: &'a RegionInferenceContext<'tcx>,
|
||||||
borrow_set: &'a BorrowSet<'tcx>,
|
borrow_set: &'a BorrowSet<'tcx>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let borrows_out_of_scope_at_location =
|
let mut borrows_out_of_scope_at_location =
|
||||||
calculate_borrows_out_of_scope_at_location(body, nonlexical_regioncx, borrow_set);
|
calculate_borrows_out_of_scope_at_location(body, regioncx, borrow_set);
|
||||||
|
|
||||||
|
// The in-tree polonius analysis computes loans going out of scope using the set-of-loans
|
||||||
|
// model, and makes sure they're identical to the existing computation of the set-of-points
|
||||||
|
// model.
|
||||||
|
if tcx.sess.opts.unstable_opts.polonius.is_next_enabled() {
|
||||||
|
let mut polonius_prec = PoloniusOutOfScopePrecomputer::new(body, regioncx);
|
||||||
|
for (loan_idx, loan_data) in borrow_set.iter_enumerated() {
|
||||||
|
let issuing_region = loan_data.region;
|
||||||
|
let loan_issued_at = loan_data.reserve_location;
|
||||||
|
|
||||||
|
polonius_prec.precompute_loans_out_of_scope(
|
||||||
|
loan_idx,
|
||||||
|
issuing_region,
|
||||||
|
loan_issued_at,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
borrows_out_of_scope_at_location, polonius_prec.loans_out_of_scope_at_location,
|
||||||
|
"the loans out of scope must be the same as the borrows out of scope"
|
||||||
|
);
|
||||||
|
|
||||||
|
borrows_out_of_scope_at_location = polonius_prec.loans_out_of_scope_at_location;
|
||||||
|
}
|
||||||
|
|
||||||
Borrows { tcx, body, borrow_set, borrows_out_of_scope_at_location }
|
Borrows { tcx, body, borrow_set, borrows_out_of_scope_at_location }
|
||||||
}
|
}
|
||||||
@ -333,6 +523,13 @@ impl<'tcx> rustc_mir_dataflow::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Forward dataflow computation of the set of borrows that are in scope at a particular location.
|
||||||
|
/// - we gen the introduced loans
|
||||||
|
/// - we kill loans on locals going out of (regular) scope
|
||||||
|
/// - we kill the loans going out of their region's NLL scope: in NLL terms, the frontier where a
|
||||||
|
/// region stops containing the CFG points reachable from the issuing location.
|
||||||
|
/// - we also kill loans of conflicting places when overwriting a shared path: e.g. borrows of
|
||||||
|
/// `a.b.c` when `a` is overwritten.
|
||||||
impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
|
impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
|
||||||
type Idx = BorrowIndex;
|
type Idx = BorrowIndex;
@ -44,7 +44,7 @@ pub fn categorize(context: PlaceContext) -> Option<DefUse> {
PlaceContext::MutatingUse(MutatingUseContext::Projection) |
|
PlaceContext::MutatingUse(MutatingUseContext::Projection) |
|
||||||
|
|
||||||
// Borrows only consider their local used at the point of the borrow.
|
// Borrows only consider their local used at the point of the borrow.
|
||||||
// This won't affect the results since we use this analysis for generators
|
// This won't affect the results since we use this analysis for coroutines
|
||||||
// and we only care about the result at suspension points. Borrows cannot
|
// and we only care about the result at suspension points. Borrows cannot
|
||||||
// cross suspension points so this behavior is unproblematic.
|
// cross suspension points so this behavior is unproblematic.
|
||||||
PlaceContext::MutatingUse(MutatingUseContext::Borrow) |
|
PlaceContext::MutatingUse(MutatingUseContext::Borrow) |
|
||||||
|
|||||||
@ -8,7 +8,7 @@ use rustc_errors::{
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind, Res};
 use rustc_hir::intravisit::{walk_block, walk_expr, Visitor};
-use rustc_hir::{AsyncGeneratorKind, GeneratorKind, LangItem};
+use rustc_hir::{CoroutineKind, CoroutineSource, LangItem};
 use rustc_infer::traits::ObligationCause;
 use rustc_middle::hir::nested_filter::OnlyBodies;
 use rustc_middle::mir::tcx::PlaceTy;
@ -351,7 +351,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         }
         // Check if we are in a situation of `ident @ ident` where we want to suggest
         // `ref ident @ ref ident` or `ref ident @ Struct { ref ident }`.
-        if let Some(subpat) = sub && self.pat.is_none() {
+        if let Some(subpat) = sub
+            && self.pat.is_none()
+        {
             self.visit_pat(subpat);
             if self.pat.is_some() {
                 self.parent_pat = Some(p);
@ -370,7 +372,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         let mut finder =
             ExpressionFinder { expr_span: move_span, expr: None, pat: None, parent_pat: None };
         finder.visit_expr(expr);
-        if let Some(span) = span && let Some(expr) = finder.expr {
+        if let Some(span) = span
+            && let Some(expr) = finder.expr
+        {
             for (_, expr) in hir.parent_iter(expr.hir_id) {
                 if let hir::Node::Expr(expr) = expr {
                     if expr.span.contains(span) {
@ -425,10 +429,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             Some(hir::intravisit::FnKind::Method(..)) => "method",
             Some(hir::intravisit::FnKind::Closure) => "closure",
         };
-        span.push_span_label(
-            ident.span,
-            format!("in this {descr}"),
-        );
+        span.push_span_label(ident.span, format!("in this {descr}"));
         err.span_note(
             span,
             format!(
@ -441,15 +442,16 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             let ty = place.ty(self.body, self.infcx.tcx).ty;
             if let hir::Node::Expr(parent_expr) = parent
                 && let hir::ExprKind::Call(call_expr, _) = parent_expr.kind
-                && let hir::ExprKind::Path(
-                    hir::QPath::LangItem(LangItem::IntoIterIntoIter, _, _)
-                ) = call_expr.kind
+                && let hir::ExprKind::Path(hir::QPath::LangItem(
+                    LangItem::IntoIterIntoIter,
+                    _,
+                    _,
+                )) = call_expr.kind
             {
                 // Do not suggest `.clone()` in a `for` loop, we already suggest borrowing.
-            } else if let UseSpans::FnSelfUse {
-                kind: CallKind::Normal { .. },
-                ..
-            } = move_spans {
+            } else if let UseSpans::FnSelfUse { kind: CallKind::Normal { .. }, .. } =
+                move_spans
+            {
                 // We already suggest cloning for these cases in `explain_captures`.
             } else {
                 self.suggest_cloning(err, ty, expr, move_span);
@ -602,10 +604,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             if self.sugg_span.is_some() {
                 return;
             }
-            if let hir::StmtKind::Local(hir::Local {
-                span, ty, init: None, ..
-            }) = &ex.kind && span.contains(self.decl_span) {
+            if let hir::StmtKind::Local(hir::Local { span, ty, init: None, .. }) = &ex.kind
+                && span.contains(self.decl_span)
+            {
                 self.sugg_span = ty.map_or(Some(self.decl_span), |ty| Some(ty.span));
             }
             hir::intravisit::walk_stmt(self, ex);
         }
@ -743,19 +745,14 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             ".clone()".to_owned()
         };
         if let Some(clone_trait_def) = tcx.lang_items().clone_trait()
-            && self.infcx
-                .type_implements_trait(
-                    clone_trait_def,
-                    [ty],
-                    self.param_env,
-                )
+            && self
+                .infcx
+                .type_implements_trait(clone_trait_def, [ty], self.param_env)
                 .must_apply_modulo_regions()
         {
             let msg = if let ty::Adt(def, _) = ty.kind()
-                && [
-                    tcx.get_diagnostic_item(sym::Arc),
-                    tcx.get_diagnostic_item(sym::Rc),
-                ].contains(&Some(def.did()))
+                && [tcx.get_diagnostic_item(sym::Arc), tcx.get_diagnostic_item(sym::Rc)]
+                    .contains(&Some(def.did()))
             {
                 "clone the value to increment its reference count"
             } else {
@ -851,7 +848,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         move_spans.var_subdiag(None, &mut err, None, |kind, var_span| {
             use crate::session_diagnostics::CaptureVarCause::*;
             match kind {
-                Some(_) => MoveUseInGenerator { var_span },
+                Some(_) => MoveUseInCoroutine { var_span },
                 None => MoveUseInClosure { var_span },
             }
         });
@ -897,7 +894,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
                 let desc_place = self.describe_any_place(place.as_ref());
                 match kind {
                     Some(_) => {
-                        BorrowUsePlaceGenerator { place: desc_place, var_span, is_single_var: true }
+                        BorrowUsePlaceCoroutine { place: desc_place, var_span, is_single_var: true }
                     }
                     None => BorrowUsePlaceClosure { place: desc_place, var_span, is_single_var: true },
                 }
@ -929,8 +926,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         let borrow_spans = self.borrow_spans(span, location);
         let span = borrow_spans.args_or_use();
 
-        let container_name = if issued_spans.for_generator() || borrow_spans.for_generator() {
-            "generator"
+        let container_name = if issued_spans.for_coroutine() || borrow_spans.for_coroutine() {
+            "coroutine"
         } else {
             "closure"
         };
@ -1043,7 +1040,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             |kind, var_span| {
                 use crate::session_diagnostics::CaptureVarCause::*;
                 match kind {
-                    Some(_) => BorrowUsePlaceGenerator {
+                    Some(_) => BorrowUsePlaceCoroutine {
                         place: desc_place,
                         var_span,
                         is_single_var: true,
@ -1127,7 +1124,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         borrow_spans.var_subdiag(None, &mut err, Some(gen_borrow_kind), |kind, var_span| {
             use crate::session_diagnostics::CaptureVarCause::*;
             match kind {
-                Some(_) => BorrowUsePlaceGenerator {
+                Some(_) => BorrowUsePlaceCoroutine {
                     place: desc_place,
                     var_span,
                     is_single_var: false,
@ -1148,7 +1145,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
                 let borrow_place_desc = self.describe_any_place(borrow_place.as_ref());
                 match kind {
                     Some(_) => {
-                        FirstBorrowUsePlaceGenerator { place: borrow_place_desc, var_span }
+                        FirstBorrowUsePlaceCoroutine { place: borrow_place_desc, var_span }
                     }
                     None => FirstBorrowUsePlaceClosure { place: borrow_place_desc, var_span },
                 }
@ -1162,7 +1159,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             |kind, var_span| {
                 use crate::session_diagnostics::CaptureVarCause::*;
                 match kind {
-                    Some(_) => SecondBorrowUsePlaceGenerator { place: desc_place, var_span },
+                    Some(_) => SecondBorrowUsePlaceCoroutine { place: desc_place, var_span },
                     None => SecondBorrowUsePlaceClosure { place: desc_place, var_span },
                 }
             },
@ -1328,42 +1325,160 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             issue_span: Span,
             expr_span: Span,
             body_expr: Option<&'hir hir::Expr<'hir>>,
-            loop_bind: Option<Symbol>,
+            loop_bind: Option<&'hir Ident>,
+            loop_span: Option<Span>,
+            head_span: Option<Span>,
+            pat_span: Option<Span>,
+            head: Option<&'hir hir::Expr<'hir>>,
         }
         impl<'hir> Visitor<'hir> for ExprFinder<'hir> {
             fn visit_expr(&mut self, ex: &'hir hir::Expr<'hir>) {
-                if let hir::ExprKind::Loop(hir::Block{ stmts: [stmt, ..], ..}, _, hir::LoopSource::ForLoop, _) = ex.kind &&
-                    let hir::StmtKind::Expr(hir::Expr{ kind: hir::ExprKind::Match(call, [_, bind, ..], _), ..}) = stmt.kind &&
-                    let hir::ExprKind::Call(path, _args) = call.kind &&
-                    let hir::ExprKind::Path(hir::QPath::LangItem(LangItem::IteratorNext, _, _, )) = path.kind &&
-                    let hir::PatKind::Struct(path, [field, ..], _) = bind.pat.kind &&
-                    let hir::QPath::LangItem(LangItem::OptionSome, _, _) = path &&
-                    let PatField { pat: hir::Pat{ kind: hir::PatKind::Binding(_, _, ident, ..), .. }, ..} = field &&
-                    self.issue_span.source_equal(call.span) {
-                    self.loop_bind = Some(ident.name);
+                // Try to find
+                // let result = match IntoIterator::into_iter(<head>) {
+                //     mut iter => {
+                //         [opt_ident]: loop {
+                //             match Iterator::next(&mut iter) {
+                //                 None => break,
+                //                 Some(<pat>) => <body>,
+                //             };
+                //         }
+                //     }
+                // };
+                // corresponding to the desugaring of a for loop `for <pat> in <head> { <body> }`.
+                if let hir::ExprKind::Call(path, [arg]) = ex.kind
+                    && let hir::ExprKind::Path(hir::QPath::LangItem(
+                        LangItem::IntoIterIntoIter,
+                        _,
+                        _,
+                    )) = path.kind
+                    && arg.span.contains(self.issue_span)
+                {
+                    // Find `IntoIterator::into_iter(<head>)`
+                    self.head = Some(arg);
+                }
+                if let hir::ExprKind::Loop(
+                    hir::Block { stmts: [stmt, ..], .. },
+                    _,
+                    hir::LoopSource::ForLoop,
+                    _,
+                ) = ex.kind
+                    && let hir::StmtKind::Expr(hir::Expr {
+                        kind: hir::ExprKind::Match(call, [_, bind, ..], _),
+                        span: head_span,
+                        ..
+                    }) = stmt.kind
+                    && let hir::ExprKind::Call(path, _args) = call.kind
+                    && let hir::ExprKind::Path(hir::QPath::LangItem(LangItem::IteratorNext, _, _)) =
+                        path.kind
+                    && let hir::PatKind::Struct(path, [field, ..], _) = bind.pat.kind
+                    && let hir::QPath::LangItem(LangItem::OptionSome, pat_span, _) = path
+                    && call.span.contains(self.issue_span)
+                {
+                    // Find `<pat>` and the span for the whole `for` loop.
+                    if let PatField {
+                        pat: hir::Pat { kind: hir::PatKind::Binding(_, _, ident, ..), .. },
+                        ..
+                    } = field
+                    {
+                        self.loop_bind = Some(ident);
                     }
+                    self.head_span = Some(*head_span);
+                    self.pat_span = Some(pat_span);
+                    self.loop_span = Some(stmt.span);
+                }
 
-                if let hir::ExprKind::MethodCall(body_call, _recv, ..) = ex.kind &&
-                    body_call.ident.name == sym::next && ex.span.source_equal(self.expr_span) {
-                    self.body_expr = Some(ex);
+                if let hir::ExprKind::MethodCall(body_call, recv, ..) = ex.kind
+                    && body_call.ident.name == sym::next
+                    && recv.span.source_equal(self.expr_span)
+                {
+                    self.body_expr = Some(ex);
                 }
 
                 hir::intravisit::walk_expr(self, ex);
             }
         }
-        let mut finder =
-            ExprFinder { expr_span: span, issue_span, loop_bind: None, body_expr: None };
+        let mut finder = ExprFinder {
+            expr_span: span,
+            issue_span,
+            loop_bind: None,
+            body_expr: None,
+            head_span: None,
+            loop_span: None,
+            pat_span: None,
+            head: None,
+        };
         finder.visit_expr(hir.body(body_id).value);
 
-        if let Some(loop_bind) = finder.loop_bind &&
-            let Some(body_expr) = finder.body_expr &&
-            let Some(def_id) = typeck_results.type_dependent_def_id(body_expr.hir_id) &&
-            let Some(trait_did) = tcx.trait_of_item(def_id) &&
-            tcx.is_diagnostic_item(sym::Iterator, trait_did) {
-            err.note(format!(
-                "a for loop advances the iterator for you, the result is stored in `{loop_bind}`."
-            ));
-            err.help("if you want to call `next` on a iterator within the loop, consider using `while let`.");
+        if let Some(body_expr) = finder.body_expr
+            && let Some(loop_span) = finder.loop_span
+            && let Some(def_id) = typeck_results.type_dependent_def_id(body_expr.hir_id)
+            && let Some(trait_did) = tcx.trait_of_item(def_id)
+            && tcx.is_diagnostic_item(sym::Iterator, trait_did)
+        {
+            if let Some(loop_bind) = finder.loop_bind {
+                err.note(format!(
+                    "a for loop advances the iterator for you, the result is stored in `{}`",
+                    loop_bind.name,
+                ));
+            } else {
+                err.note(
+                    "a for loop advances the iterator for you, the result is stored in its pattern",
+                );
+            }
+            let msg = "if you want to call `next` on a iterator within the loop, consider using \
+                       `while let`";
+            if let Some(head) = finder.head
+                && let Some(pat_span) = finder.pat_span
+                && loop_span.contains(body_expr.span)
+                && loop_span.contains(head.span)
+            {
+                let sm = self.infcx.tcx.sess.source_map();
+
+                let mut sugg = vec![];
+                if let hir::ExprKind::Path(hir::QPath::Resolved(None, _)) = head.kind {
+                    // A bare path doesn't need a `let` assignment, it's already a simple
+                    // binding access.
+                    // As a new binding wasn't added, we don't need to modify the advancing call.
+                    sugg.push((loop_span.with_hi(pat_span.lo()), format!("while let Some(")));
+                    sugg.push((
+                        pat_span.shrink_to_hi().with_hi(head.span.lo()),
+                        ") = ".to_string(),
+                    ));
+                    sugg.push((head.span.shrink_to_hi(), ".next()".to_string()));
+                } else {
+                    // Needs a new a `let` binding.
+                    let indent = if let Some(indent) = sm.indentation_before(loop_span) {
+                        format!("\n{indent}")
+                    } else {
+                        " ".to_string()
+                    };
+                    let Ok(head_str) = sm.span_to_snippet(head.span) else {
+                        err.help(msg);
+                        return;
+                    };
+                    sugg.push((
+                        loop_span.with_hi(pat_span.lo()),
+                        format!("let iter = {head_str};{indent}while let Some("),
+                    ));
+                    sugg.push((
+                        pat_span.shrink_to_hi().with_hi(head.span.hi()),
+                        ") = iter.next()".to_string(),
+                    ));
+                    // As a new binding was added, we should change how the iterator is advanced to
+                    // use the newly introduced binding.
+                    if let hir::ExprKind::MethodCall(_, recv, ..) = body_expr.kind
+                        && let hir::ExprKind::Path(hir::QPath::Resolved(None, ..)) = recv.kind
+                    {
+                        // As we introduced a `let iter = <head>;`, we need to change where the
+                        // already borrowed value was accessed from `<recv>.next()` to
+                        // `iter.next()`.
+                        sugg.push((recv.span, "iter".to_string()));
+                    }
+                }
+                err.multipart_suggestion(msg, sugg, Applicability::MaybeIncorrect);
+            } else {
+                err.help(msg);
+            }
         }
     }
 
@ -1459,7 +1574,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 
         // Get closure's arguments
         let ty::Closure(_, args) = typeck_results.expr_ty(closure_expr).kind() else {
-            /* hir::Closure can be a generator too */
+            /* hir::Closure can be a coroutine too */
             return;
         };
         let sig = args.as_closure().sig();
@ -1539,69 +1654,80 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             fn visit_expr(&mut self, e: &'hir hir::Expr<'hir>) {
                 if e.span.contains(self.capture_span) {
                     if let hir::ExprKind::Closure(&hir::Closure {
                         movability: None,
                         body,
                         fn_arg_span,
-                        fn_decl: hir::FnDecl{ inputs, .. },
+                        fn_decl: hir::FnDecl { inputs, .. },
                         ..
-                    }) = e.kind &&
-                        let Some(hir::Node::Expr(body )) = self.hir.find(body.hir_id) {
+                    }) = e.kind
+                        && let Some(hir::Node::Expr(body)) = self.hir.find(body.hir_id)
+                    {
                         self.suggest_arg = "this: &Self".to_string();
                         if inputs.len() > 0 {
                             self.suggest_arg.push_str(", ");
                         }
                         self.in_closure = true;
                         self.closure_arg_span = fn_arg_span;
                         self.visit_expr(body);
                         self.in_closure = false;
                     }
                 }
                 if let hir::Expr { kind: hir::ExprKind::Path(path), .. } = e {
-                    if let hir::QPath::Resolved(_, hir::Path { segments: [seg], ..}) = path &&
-                        seg.ident.name == kw::SelfLower && self.in_closure {
+                    if let hir::QPath::Resolved(_, hir::Path { segments: [seg], .. }) = path
+                        && seg.ident.name == kw::SelfLower
+                        && self.in_closure
+                    {
                         self.closure_change_spans.push(e.span);
                     }
                 }
                 hir::intravisit::walk_expr(self, e);
             }
 
             fn visit_local(&mut self, local: &'hir hir::Local<'hir>) {
-                if let hir::Pat { kind: hir::PatKind::Binding(_, hir_id, _ident, _), .. } = local.pat &&
-                    let Some(init) = local.init
+                if let hir::Pat { kind: hir::PatKind::Binding(_, hir_id, _ident, _), .. } =
+                    local.pat
+                    && let Some(init) = local.init
                 {
-                    if let hir::Expr { kind: hir::ExprKind::Closure(&hir::Closure {
-                        movability: None,
-                        ..
-                    }), .. } = init &&
-                        init.span.contains(self.capture_span) {
+                    if let hir::Expr {
+                        kind: hir::ExprKind::Closure(&hir::Closure { movability: None, .. }),
+                        ..
+                    } = init
+                        && init.span.contains(self.capture_span)
+                    {
                         self.closure_local_id = Some(*hir_id);
                     }
                 }
                 hir::intravisit::walk_local(self, local);
             }
 
             fn visit_stmt(&mut self, s: &'hir hir::Stmt<'hir>) {
-                if let hir::StmtKind::Semi(e) = s.kind &&
-                    let hir::ExprKind::Call(hir::Expr { kind: hir::ExprKind::Path(path), ..}, args) = e.kind &&
-                    let hir::QPath::Resolved(_, hir::Path { segments: [seg], ..}) = path &&
-                    let Res::Local(hir_id) = seg.res &&
-                    Some(hir_id) == self.closure_local_id {
+                if let hir::StmtKind::Semi(e) = s.kind
+                    && let hir::ExprKind::Call(
+                        hir::Expr { kind: hir::ExprKind::Path(path), .. },
+                        args,
+                    ) = e.kind
+                    && let hir::QPath::Resolved(_, hir::Path { segments: [seg], .. }) = path
+                    && let Res::Local(hir_id) = seg.res
+                    && Some(hir_id) == self.closure_local_id
+                {
                     let (span, arg_str) = if args.len() > 0 {
                         (args[0].span.shrink_to_lo(), "self, ".to_string())
                     } else {
                         let span = e.span.trim_start(seg.ident.span).unwrap_or(e.span);
                         (span, "(self)".to_string())
                     };
                     self.closure_call_changes.push((span, arg_str));
                 }
                 hir::intravisit::walk_stmt(self, s);
             }
         }
 
-        if let Some(hir::Node::ImplItem(
-            hir::ImplItem { kind: hir::ImplItemKind::Fn(_fn_sig, body_id), .. }
-        )) = hir.find(self.mir_hir_id()) &&
-            let Some(hir::Node::Expr(expr)) = hir.find(body_id.hir_id) {
+        if let Some(hir::Node::ImplItem(hir::ImplItem {
+            kind: hir::ImplItemKind::Fn(_fn_sig, body_id),
+            ..
+        })) = hir.find(self.mir_hir_id())
+            && let Some(hir::Node::Expr(expr)) = hir.find(body_id.hir_id)
+        {
             let mut finder = ExpressionFinder {
                 capture_span: *capture_kind_span,
                 closure_change_spans: vec![],
@ -1822,7 +1948,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             (
                 Some(name),
                 BorrowExplanation::UsedLater(LaterUseKind::ClosureCapture, var_or_use_span, _),
-            ) if borrow_spans.for_generator() || borrow_spans.for_closure() => self
+            ) if borrow_spans.for_coroutine() || borrow_spans.for_closure() => self
                 .report_escaping_closure_capture(
                     borrow_spans,
                     borrow_span,
@ -1847,7 +1973,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
                     span,
                     ..
                 },
-            ) if borrow_spans.for_generator() || borrow_spans.for_closure() => self
+            ) if borrow_spans.for_coroutine() || borrow_spans.for_closure() => self
                 .report_escaping_closure_capture(
                     borrow_spans,
                     borrow_span,
@ -1950,8 +2076,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             .unwrap_or_else(|| {
                 match &self.infcx.tcx.def_kind(self.mir_def_id()) {
                     DefKind::Closure => "enclosing closure",
-                    DefKind::Generator => "enclosing generator",
-                    kind => bug!("expected closure or generator, found {:?}", kind),
+                    DefKind::Coroutine => "enclosing coroutine",
+                    kind => bug!("expected closure or coroutine, found {:?}", kind),
                 }
                 .to_string()
             })
@ -1985,7 +2111,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 
         borrow_spans.args_subdiag(&mut err, |args_span| {
             crate::session_diagnostics::CaptureArgLabel::Capture {
-                is_within: borrow_spans.for_generator(),
+                is_within: borrow_spans.for_coroutine(),
                 args_span,
             }
         });
@ -2136,6 +2262,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             current: usize,
             found: usize,
             prop_expr: Option<&'tcx hir::Expr<'tcx>>,
+            call: Option<&'tcx hir::Expr<'tcx>>,
         }
 
         impl<'tcx> Visitor<'tcx> for NestedStatementVisitor<'tcx> {
@ -2145,6 +2272,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
                 self.current -= 1;
             }
             fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
+                if let hir::ExprKind::MethodCall(_, rcvr, _, _) = expr.kind {
+                    if self.span == rcvr.span.source_callsite() {
+                        self.call = Some(expr);
+                    }
+                }
                 if self.span == expr.span.source_callsite() {
                     self.found = self.current;
                     if self.prop_expr.is_none() {
@ -2168,25 +2300,43 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
                 current: 0,
                 found: 0,
                 prop_expr: None,
+                call: None,
             };
             visitor.visit_stmt(stmt);
 
             let typeck_results = self.infcx.tcx.typeck(self.mir_def_id());
-            let expr_ty: Option<Ty<'_>> = visitor.prop_expr.map(|expr| typeck_results.expr_ty(expr).peel_refs());
-
-            let is_format_arguments_item =
-                if let Some(expr_ty) = expr_ty
-                && let ty::Adt(adt, _) = expr_ty.kind() {
-                self.infcx.tcx.lang_items().get(LangItem::FormatArguments) == Some(adt.did())
-            } else {
-                false
-            };
+            let expr_ty: Option<Ty<'_>> =
+                visitor.prop_expr.map(|expr| typeck_results.expr_ty(expr).peel_refs());
+
+            let is_format_arguments_item = if let Some(expr_ty) = expr_ty
+                && let ty::Adt(adt, _) = expr_ty.kind()
+            {
+                self.infcx.tcx.lang_items().get(LangItem::FormatArguments)
+                    == Some(adt.did())
+            } else {
+                false
+            };
 
             if visitor.found == 0
                 && stmt.span.contains(proper_span)
                 && let Some(p) = sm.span_to_margin(stmt.span)
                 && let Ok(s) = sm.span_to_snippet(proper_span)
             {
+                if let Some(call) = visitor.call
+                    && let hir::ExprKind::MethodCall(path, _, [], _) = call.kind
+                    && path.ident.name == sym::iter
+                    && let Some(ty) = expr_ty
+                {
+                    err.span_suggestion_verbose(
+                        path.ident.span,
+                        format!(
+                            "consider consuming the `{ty}` when turning it into an \
+                             `Iterator`",
+                        ),
+                        "into_iter".to_string(),
+                        Applicability::MaybeIncorrect,
+                    );
+                }
                 if !is_format_arguments_item {
                     let addition = format!("let binding = {};\n{}", s, " ".repeat(p));
                     err.multipart_suggestion_verbose(
@ -2224,7 +2374,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 
         borrow_spans.args_subdiag(&mut err, |args_span| {
             crate::session_diagnostics::CaptureArgLabel::Capture {
-                is_within: borrow_spans.for_generator(),
+                is_within: borrow_spans.for_coroutine(),
                 args_span,
             }
         });
@ -2340,11 +2490,17 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 
         let (sugg_span, suggestion) = match tcx.sess.source_map().span_to_snippet(args_span) {
             Ok(string) => {
-                if string.starts_with("async ") {
-                    let pos = args_span.lo() + BytePos(6);
-                    (args_span.with_lo(pos).with_hi(pos), "move ")
-                } else if string.starts_with("async|") {
-                    let pos = args_span.lo() + BytePos(5);
+                let coro_prefix = if string.starts_with("async") {
+                    // `async` is 5 chars long. Not using `.len()` to avoid the cast from `usize` to `u32`
+                    Some(5)
+                } else if string.starts_with("gen") {
+                    // `gen` is 3 chars long
+                    Some(3)
+                } else {
+                    None
+                };
+                if let Some(n) = coro_prefix {
+                    let pos = args_span.lo() + BytePos(n);
                     (args_span.with_lo(pos).with_hi(pos), " move")
                 } else {
                     (args_span.shrink_to_lo(), "move ")
@ -2352,14 +2508,19 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             }
             Err(_) => (args_span, "move |<args>| <body>"),
         };
-        let kind = match use_span.generator_kind() {
-            Some(generator_kind) => match generator_kind {
-                GeneratorKind::Async(async_kind) => match async_kind {
-                    AsyncGeneratorKind::Block => "async block",
-                    AsyncGeneratorKind::Closure => "async closure",
+        let kind = match use_span.coroutine_kind() {
+            Some(coroutine_kind) => match coroutine_kind {
+                CoroutineKind::Gen(kind) => match kind {
+                    CoroutineSource::Block => "gen block",
+                    CoroutineSource::Closure => "gen closure",
+                    _ => bug!("gen block/closure expected, but gen function found."),
+                },
+                CoroutineKind::Async(async_kind) => match async_kind {
+                    CoroutineSource::Block => "async block",
+                    CoroutineSource::Closure => "async closure",
                     _ => bug!("async block/closure expected, but async function found."),
                 },
-                GeneratorKind::Gen => "generator",
+                CoroutineKind::Coroutine => "coroutine",
             },
             None => "closure",
         };
@ -2388,7 +2549,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             }
             ConstraintCategory::CallArgument(_) => {
                 fr_name.highlight_region_name(&mut err);
-                if matches!(use_span.generator_kind(), Some(GeneratorKind::Async(_))) {
+                if matches!(use_span.coroutine_kind(), Some(CoroutineKind::Async(_))) {
                     err.note(
                         "async blocks are not executed immediately and must either take a \
                          reference or ownership of outside variables they use",
@ -2482,9 +2643,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         /* Check if the mpi is initialized as an argument */
        let mut is_argument = false;
        for arg in self.body.args_iter() {
-            let path = self.move_data.rev_lookup.find_local(arg);
+            if let Some(path) = self.move_data.rev_lookup.find_local(arg) {
                 if mpis.contains(&path) {
                     is_argument = true;
+                }
             }
         }
 
@ -2656,7 +2818,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             loan_spans.var_subdiag(None, &mut err, Some(loan.kind), |kind, var_span| {
                 use crate::session_diagnostics::CaptureVarCause::*;
                 match kind {
-                    Some(_) => BorrowUseInGenerator { var_span },
+                    Some(_) => BorrowUseInCoroutine { var_span },
                     None => BorrowUseInClosure { var_span },
                 }
             });
@ -2672,7 +2834,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             loan_spans.var_subdiag(None, &mut err, Some(loan.kind), |kind, var_span| {
                 use crate::session_diagnostics::CaptureVarCause::*;
                 match kind {
-                    Some(_) => BorrowUseInGenerator { var_span },
+                    Some(_) => BorrowUseInCoroutine { var_span },
                     None => BorrowUseInClosure { var_span },
                 }
             });
|||||||
@ -76,10 +76,10 @@ impl<'tcx> BorrowExplanation<'tcx> {
|
|||||||
expr_finder.visit_expr(body.value);
|
expr_finder.visit_expr(body.value);
|
||||||
if let Some(mut expr) = expr_finder.result {
|
if let Some(mut expr) = expr_finder.result {
|
||||||
while let hir::ExprKind::AddrOf(_, _, inner)
|
while let hir::ExprKind::AddrOf(_, _, inner)
|
||||||
| hir::ExprKind::Unary(hir::UnOp::Deref, inner)
|
| hir::ExprKind::Unary(hir::UnOp::Deref, inner)
|
||||||
| hir::ExprKind::Field(inner, _)
|
| hir::ExprKind::Field(inner, _)
|
||||||
| hir::ExprKind::MethodCall(_, inner, _, _)
|
| hir::ExprKind::MethodCall(_, inner, _, _)
|
||||||
| hir::ExprKind::Index(inner, _, _) = &expr.kind
|
| hir::ExprKind::Index(inner, _, _) = &expr.kind
|
||||||
{
|
{
|
||||||
expr = inner;
|
expr = inner;
|
||||||
}
|
}
|
||||||
@ -88,10 +88,7 @@ impl<'tcx> BorrowExplanation<'tcx> {
|
|||||||
&& let hir::def::Res::Local(hir_id) = p.res
|
&& let hir::def::Res::Local(hir_id) = p.res
|
||||||
&& let Some(hir::Node::Pat(pat)) = tcx.hir().find(hir_id)
|
&& let Some(hir::Node::Pat(pat)) = tcx.hir().find(hir_id)
|
||||||
{
|
{
|
||||||
err.span_label(
|
err.span_label(pat.span, format!("binding `{ident}` declared here"));
|
||||||
pat.span,
|
|
||||||
format!("binding `{ident}` declared here"),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -185,7 +182,7 @@ impl<'tcx> BorrowExplanation<'tcx> {
|
|||||||
// Otherwise, just report the whole type (and use
|
// Otherwise, just report the whole type (and use
|
||||||
// the intentionally fuzzy phrase "destructor")
|
// the intentionally fuzzy phrase "destructor")
|
||||||
ty::Closure(..) => ("destructor", "closure".to_owned()),
|
ty::Closure(..) => ("destructor", "closure".to_owned()),
|
||||||
ty::Generator(..) => ("destructor", "generator".to_owned()),
|
ty::Coroutine(..) => ("destructor", "coroutine".to_owned()),
|
||||||
|
|
||||||
_ => ("destructor", format!("type `{}`", local_decl.ty)),
|
_ => ("destructor", format!("type `{}`", local_decl.ty)),
|
||||||
};
|
};
|
||||||
@ -419,7 +416,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
if self.local_names[local].is_some()
|
if self.local_names[local].is_some()
|
||||||
&& let Some((WriteKind::StorageDeadOrDrop, place)) = kind_place
|
&& let Some((WriteKind::StorageDeadOrDrop, place)) = kind_place
|
||||||
&& let Some(borrowed_local) = place.as_local()
|
&& let Some(borrowed_local) = place.as_local()
|
||||||
&& self.local_names[borrowed_local].is_some() && local != borrowed_local
|
&& self.local_names[borrowed_local].is_some()
|
||||||
|
&& local != borrowed_local
|
||||||
{
|
{
|
||||||
should_note_order = true;
|
should_note_order = true;
|
||||||
}
|
}
|
||||||
|
|||||||
@ -8,7 +8,7 @@ use itertools::Itertools;
 use rustc_errors::{Applicability, Diagnostic};
 use rustc_hir as hir;
 use rustc_hir::def::{CtorKind, Namespace};
-use rustc_hir::GeneratorKind;
+use rustc_hir::CoroutineKind;
 use rustc_index::IndexSlice;
 use rustc_infer::infer::LateBoundRegionConversionTime;
 use rustc_middle::mir::tcx::PlaceTy;
@ -46,6 +46,7 @@ mod mutability_errors;
 mod region_errors;
 
 pub(crate) use bound_region_errors::{ToUniverseInfo, UniverseInfo};
+pub(crate) use move_errors::{IllegalMoveOriginKind, MoveError};
 pub(crate) use mutability_errors::AccessKind;
 pub(crate) use outlives_suggestion::OutlivesSuggestionBuilder;
 pub(crate) use region_errors::{ErrorConstraintInfo, RegionErrorKind, RegionErrors};
@ -369,7 +370,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             ty::Array(ty, _) | ty::Slice(ty) => {
                 self.describe_field_from_ty(ty, field, variant_index, including_tuple_field)
             }
-            ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => {
+            ty::Closure(def_id, _) | ty::Coroutine(def_id, _, _) => {
                 // We won't be borrowck'ing here if the closure came from another crate,
                 // so it's safe to call `expect_local`.
                 //
@ -470,7 +471,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             }
         }
 
-        ty.print(printer).unwrap().into_buffer()
+        ty.print(&mut printer).unwrap();
+        printer.into_buffer()
     }
 
     /// Returns the name of the provided `Ty` (that must be a reference)'s region with a
@ -492,7 +494,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             bug!("ty for annotation of borrow region is not a reference");
         };
 
-        region.print(printer).unwrap().into_buffer()
+        region.print(&mut printer).unwrap();
+        printer.into_buffer()
     }
 }
 
@ -501,8 +504,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 pub(super) enum UseSpans<'tcx> {
     /// The access is caused by capturing a variable for a closure.
     ClosureUse {
-        /// This is true if the captured variable was from a generator.
-        generator_kind: Option<GeneratorKind>,
+        /// This is true if the captured variable was from a coroutine.
+        coroutine_kind: Option<CoroutineKind>,
         /// The span of the args of the closure, including the `move` keyword if
         /// it's present.
         args_span: Span,
@ -569,9 +572,9 @@ impl UseSpans<'_> {
         }
     }
 
-    pub(super) fn generator_kind(self) -> Option<GeneratorKind> {
+    pub(super) fn coroutine_kind(self) -> Option<CoroutineKind> {
         match self {
-            UseSpans::ClosureUse { generator_kind, .. } => generator_kind,
+            UseSpans::ClosureUse { coroutine_kind, .. } => coroutine_kind,
             _ => None,
         }
     }
@ -596,14 +599,14 @@ impl UseSpans<'_> {
     ) {
         use crate::InitializationRequiringAction::*;
         use CaptureVarPathUseCause::*;
-        if let UseSpans::ClosureUse { generator_kind, path_span, .. } = self {
-            match generator_kind {
+        if let UseSpans::ClosureUse { coroutine_kind, path_span, .. } = self {
+            match coroutine_kind {
                 Some(_) => {
                     err.subdiagnostic(match action {
-                        Borrow => BorrowInGenerator { path_span },
-                        MatchOn | Use => UseInGenerator { path_span },
-                        Assignment => AssignInGenerator { path_span },
-                        PartialAssignment => AssignPartInGenerator { path_span },
+                        Borrow => BorrowInCoroutine { path_span },
+                        MatchOn | Use => UseInCoroutine { path_span },
+                        Assignment => AssignInCoroutine { path_span },
+                        PartialAssignment => AssignPartInCoroutine { path_span },
                     });
                 }
                 None => {
@ -624,9 +627,9 @@ impl UseSpans<'_> {
         handler: Option<&rustc_errors::Handler>,
         err: &mut Diagnostic,
         kind: Option<rustc_middle::mir::BorrowKind>,
-        f: impl FnOnce(Option<GeneratorKind>, Span) -> CaptureVarCause,
+        f: impl FnOnce(Option<CoroutineKind>, Span) -> CaptureVarCause,
     ) {
-        if let UseSpans::ClosureUse { generator_kind, capture_kind_span, path_span, .. } = self {
+        if let UseSpans::ClosureUse { coroutine_kind, capture_kind_span, path_span, .. } = self {
             if capture_kind_span != path_span {
                 err.subdiagnostic(match kind {
                     Some(kd) => match kd {
@ -642,7 +645,7 @@ impl UseSpans<'_> {
                     None => CaptureVarKind::Move { kind_span: capture_kind_span },
                 });
             };
-            let diag = f(generator_kind, path_span);
+            let diag = f(coroutine_kind, path_span);
             match handler {
                 Some(hd) => err.eager_subdiagnostic(hd, diag),
                 None => err.subdiagnostic(diag),
@ -653,15 +656,15 @@ impl UseSpans<'_> {
     /// Returns `false` if this place is not used in a closure.
     pub(super) fn for_closure(&self) -> bool {
         match *self {
-            UseSpans::ClosureUse { generator_kind, .. } => generator_kind.is_none(),
+            UseSpans::ClosureUse { coroutine_kind, .. } => coroutine_kind.is_none(),
             _ => false,
         }
     }
 
-    /// Returns `false` if this place is not used in a generator.
-    pub(super) fn for_generator(&self) -> bool {
+    /// Returns `false` if this place is not used in a coroutine.
+    pub(super) fn for_coroutine(&self) -> bool {
         match *self {
-            UseSpans::ClosureUse { generator_kind, .. } => generator_kind.is_some(),
+            UseSpans::ClosureUse { coroutine_kind, .. } => coroutine_kind.is_some(),
             _ => false,
         }
     }
@ -780,19 +783,15 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 
         debug!("move_spans: moved_place={:?} location={:?} stmt={:?}", moved_place, location, stmt);
         if let StatementKind::Assign(box (_, Rvalue::Aggregate(kind, places))) = &stmt.kind
-            && let AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) = **kind
+            && let AggregateKind::Closure(def_id, _) | AggregateKind::Coroutine(def_id, _, _) =
+                **kind
         {
             debug!("move_spans: def_id={:?} places={:?}", def_id, places);
             let def_id = def_id.expect_local();
-            if let Some((args_span, generator_kind, capture_kind_span, path_span)) =
+            if let Some((args_span, coroutine_kind, capture_kind_span, path_span)) =
                 self.closure_span(def_id, moved_place, places)
             {
-                return ClosureUse {
-                    generator_kind,
-                    args_span,
-                    capture_kind_span,
-                    path_span,
-                };
+                return ClosureUse { coroutine_kind, args_span, capture_kind_span, path_span };
             }
         }
 
@ -804,11 +803,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             | FakeReadCause::ForLet(Some(closure_def_id)) => {
                 debug!("move_spans: def_id={:?} place={:?}", closure_def_id, place);
                 let places = &[Operand::Move(place)];
-                if let Some((args_span, generator_kind, capture_kind_span, path_span)) =
+                if let Some((args_span, coroutine_kind, capture_kind_span, path_span)) =
                     self.closure_span(closure_def_id, moved_place, IndexSlice::from_raw(places))
                 {
                     return ClosureUse {
-                        generator_kind,
+                        coroutine_kind,
                         args_span,
                         capture_kind_span,
                         path_span,
@ -918,21 +917,21 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 
         for stmt in statements.chain(maybe_additional_statement) {
             if let StatementKind::Assign(box (_, Rvalue::Aggregate(kind, places))) = &stmt.kind {
-                let (&def_id, is_generator) = match kind {
+                let (&def_id, is_coroutine) = match kind {
                     box AggregateKind::Closure(def_id, _) => (def_id, false),
-                    box AggregateKind::Generator(def_id, _, _) => (def_id, true),
+                    box AggregateKind::Coroutine(def_id, _, _) => (def_id, true),
                     _ => continue,
                 };
                 let def_id = def_id.expect_local();
 
                 debug!(
-                    "borrow_spans: def_id={:?} is_generator={:?} places={:?}",
-                    def_id, is_generator, places
+                    "borrow_spans: def_id={:?} is_coroutine={:?} places={:?}",
+                    def_id, is_coroutine, places
                 );
-                if let Some((args_span, generator_kind, capture_kind_span, path_span)) =
+                if let Some((args_span, coroutine_kind, capture_kind_span, path_span)) =
                     self.closure_span(def_id, Place::from(target).as_ref(), places)
                 {
-                    return ClosureUse { generator_kind, args_span, capture_kind_span, path_span };
+                    return ClosureUse { coroutine_kind, args_span, capture_kind_span, path_span };
                 } else {
                     return OtherUse(use_span);
                 }
@ -946,7 +945,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         OtherUse(use_span)
     }
 
-    /// Finds the spans of a captured place within a closure or generator.
+    /// Finds the spans of a captured place within a closure or coroutine.
     /// The first span is the location of the use resulting in the capture kind of the capture
     /// The second span is the location the use resulting in the captured path of the capture
     fn closure_span(
@ -954,7 +953,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         def_id: LocalDefId,
         target_place: PlaceRef<'tcx>,
         places: &IndexSlice<FieldIdx, Operand<'tcx>>,
-    ) -> Option<(Span, Option<GeneratorKind>, Span, Span)> {
+    ) -> Option<(Span, Option<CoroutineKind>, Span, Span)> {
         debug!(
             "closure_span: def_id={:?} target_place={:?} places={:?}",
             def_id, target_place, places
@ -972,11 +971,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         {
             debug!("closure_span: found captured local {:?}", place);
             let body = self.infcx.tcx.hir().body(body);
-            let generator_kind = body.generator_kind();
+            let coroutine_kind = body.coroutine_kind();
 
             return Some((
                 fn_decl_span,
-                generator_kind,
+                coroutine_kind,
                 captured_place.get_capture_kind_span(self.infcx.tcx),
                 captured_place.get_path_span(self.infcx.tcx),
             ));
@ -1123,7 +1122,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
                     &self.infcx.tcx.sess.parse_sess.span_diagnostic,
                     CaptureReasonSuggest::FreshReborrow {
                         span: move_span.shrink_to_hi(),
-                });
+                    },
+                );
             }
             if let Some(clone_trait) = tcx.lang_items().clone_trait()
                 && let trait_ref = ty::TraitRef::new(tcx, clone_trait, [ty])
@ -1191,7 +1191,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             // another message for the same span
             if !is_loop_message {
                 move_spans.var_subdiag(None, err, None, |kind, var_span| match kind {
-                    Some(_) => CaptureVarCause::PartialMoveUseInGenerator { var_span, is_partial },
+                    Some(_) => CaptureVarCause::PartialMoveUseInCoroutine { var_span, is_partial },
                     None => CaptureVarCause::PartialMoveUseInClosure { var_span, is_partial },
                 })
             }
|||||||
@ -1,16 +1,50 @@
|
|||||||
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed};
|
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed};
|
||||||
use rustc_middle::mir::*;
|
use rustc_middle::mir::*;
|
||||||
use rustc_middle::ty;
|
use rustc_middle::ty::{self, Ty};
|
||||||
use rustc_mir_dataflow::move_paths::{
|
use rustc_mir_dataflow::move_paths::{LookupResult, MovePathIndex};
|
||||||
IllegalMoveOrigin, IllegalMoveOriginKind, LookupResult, MoveError, MovePathIndex,
|
use rustc_span::{BytePos, ExpnKind, MacroKind, Span};
|
||||||
};
|
|
||||||
use rustc_span::{BytePos, Span};
|
|
||||||
|
|
||||||
use crate::diagnostics::CapturedMessageOpt;
|
use crate::diagnostics::CapturedMessageOpt;
|
||||||
use crate::diagnostics::{DescribePlaceOpt, UseSpans};
|
use crate::diagnostics::{DescribePlaceOpt, UseSpans};
|
||||||
use crate::prefixes::PrefixSet;
|
use crate::prefixes::PrefixSet;
|
||||||
use crate::MirBorrowckCtxt;
|
use crate::MirBorrowckCtxt;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum IllegalMoveOriginKind<'tcx> {
|
||||||
|
/// Illegal move due to attempt to move from behind a reference.
|
||||||
|
BorrowedContent {
|
||||||
|
/// The place the reference refers to: if erroneous code was trying to
|
||||||
|
/// move from `(*x).f` this will be `*x`.
|
||||||
|
target_place: Place<'tcx>,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Illegal move due to attempt to move from field of an ADT that
|
||||||
|
/// implements `Drop`. Rust maintains invariant that all `Drop`
|
||||||
|
/// ADT's remain fully-initialized so that user-defined destructor
|
||||||
|
/// can safely read from all of the ADT's fields.
|
||||||
|
InteriorOfTypeWithDestructor { container_ty: Ty<'tcx> },
|
||||||
|
|
||||||
|
/// Illegal move due to attempt to move out of a slice or array.
|
||||||
|
InteriorOfSliceOrArray { ty: Ty<'tcx>, is_index: bool },
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub(crate) struct MoveError<'tcx> {
|
||||||
|
place: Place<'tcx>,
|
||||||
|
location: Location,
|
||||||
|
kind: IllegalMoveOriginKind<'tcx>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'tcx> MoveError<'tcx> {
|
||||||
|
pub(crate) fn new(
|
||||||
|
place: Place<'tcx>,
|
||||||
|
location: Location,
|
||||||
|
kind: IllegalMoveOriginKind<'tcx>,
|
||||||
|
) -> Self {
|
||||||
|
MoveError { place, location, kind }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Often when desugaring a pattern match we may have many individual moves in
|
// Often when desugaring a pattern match we may have many individual moves in
|
||||||
// MIR that are all part of one operation from the user's point-of-view. For
|
// MIR that are all part of one operation from the user's point-of-view. For
|
||||||
// example:
|
// example:
|
||||||
@ -53,20 +87,18 @@ enum GroupedMoveError<'tcx> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||||
pub(crate) fn report_move_errors(&mut self, move_errors: Vec<(Place<'tcx>, MoveError<'tcx>)>) {
|
pub(crate) fn report_move_errors(&mut self) {
|
||||||
let grouped_errors = self.group_move_errors(move_errors);
|
let grouped_errors = self.group_move_errors();
|
||||||
for error in grouped_errors {
|
for error in grouped_errors {
|
||||||
self.report(error);
|
self.report(error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn group_move_errors(
|
fn group_move_errors(&mut self) -> Vec<GroupedMoveError<'tcx>> {
|
||||||
&self,
|
|
||||||
errors: Vec<(Place<'tcx>, MoveError<'tcx>)>,
|
|
||||||
) -> Vec<GroupedMoveError<'tcx>> {
|
|
||||||
let mut grouped_errors = Vec::new();
|
let mut grouped_errors = Vec::new();
|
||||||
for (original_path, error) in errors {
|
let errors = std::mem::take(&mut self.move_errors);
|
||||||
self.append_to_grouped_errors(&mut grouped_errors, original_path, error);
|
for error in errors {
|
||||||
|
self.append_to_grouped_errors(&mut grouped_errors, error);
|
||||||
}
|
}
|
||||||
grouped_errors
|
grouped_errors
|
||||||
}
|
}
|
||||||
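For context on why grouping is needed, a hypothetical example (not from this diff): one pattern produces several MIR moves, one per binding, and the grouping above lets them be reported together rather than as one error per field.

```rust
fn main() {
    let pair = &(String::from("a"), String::from("b"));

    // Each binding below is a separate move in MIR, but both come from one
    // pattern, so they are grouped into a single report:
    //
    // match *pair {
    //     (x, y) => drop((x, y)), // error[E0507]: cannot move out of `pair.0` / `pair.1`
    // }

    // A version that compiles, borrowing instead of moving:
    match pair {
        (x, y) => println!("{x} {y}"),
    }
}
```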
@ -74,66 +106,58 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
fn append_to_grouped_errors(
|
fn append_to_grouped_errors(
|
||||||
&self,
|
&self,
|
||||||
grouped_errors: &mut Vec<GroupedMoveError<'tcx>>,
|
grouped_errors: &mut Vec<GroupedMoveError<'tcx>>,
|
||||||
original_path: Place<'tcx>,
|
|
||||||
error: MoveError<'tcx>,
|
error: MoveError<'tcx>,
|
||||||
) {
|
) {
|
||||||
match error {
|
let MoveError { place: original_path, location, kind } = error;
|
||||||
MoveError::UnionMove { .. } => {
|
|
||||||
unimplemented!("don't know how to report union move errors yet.")
|
|
||||||
}
|
|
||||||
MoveError::IllegalMove { cannot_move_out_of: IllegalMoveOrigin { location, kind } } => {
|
|
||||||
// Note that the only time the place we assign to isn't a temporary
|
|
||||||
// (i.e. is a user variable) is when initializing it.
|
|
||||||
// If that ever stops being the case, then the ever initialized
|
|
||||||
// flow could be used.
|
|
||||||
if let Some(StatementKind::Assign(box (
|
|
||||||
place,
|
|
||||||
Rvalue::Use(Operand::Move(move_from)),
|
|
||||||
))) = self.body.basic_blocks[location.block]
|
|
||||||
.statements
|
|
||||||
.get(location.statement_index)
|
|
||||||
.map(|stmt| &stmt.kind)
|
|
||||||
{
|
|
||||||
if let Some(local) = place.as_local() {
|
|
||||||
let local_decl = &self.body.local_decls[local];
|
|
||||||
// opt_match_place is the
|
|
||||||
// match_span is the span of the expression being matched on
|
|
||||||
// match *x.y { ... } match_place is Some(*x.y)
|
|
||||||
// ^^^^ match_span is the span of *x.y
|
|
||||||
//
|
|
||||||
// opt_match_place is None for let [mut] x = ... statements,
|
|
||||||
// whether or not the right-hand side is a place expression
|
|
||||||
if let LocalInfo::User(BindingForm::Var(VarBindingForm {
|
|
||||||
opt_match_place: Some((opt_match_place, match_span)),
|
|
||||||
binding_mode: _,
|
|
||||||
opt_ty_info: _,
|
|
||||||
pat_span: _,
|
|
||||||
})) = *local_decl.local_info()
|
|
||||||
{
|
|
||||||
let stmt_source_info = self.body.source_info(location);
|
|
||||||
self.append_binding_error(
|
|
||||||
grouped_errors,
|
|
||||||
kind,
|
|
||||||
original_path,
|
|
||||||
*move_from,
|
|
||||||
local,
|
|
||||||
opt_match_place,
|
|
||||||
match_span,
|
|
||||||
stmt_source_info.span,
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let move_spans = self.move_spans(original_path.as_ref(), location);
|
// Note that the only time the place we assign to isn't a temporary
|
||||||
grouped_errors.push(GroupedMoveError::OtherIllegalMove {
|
// (i.e. is a user variable) is when initializing it.
|
||||||
use_spans: move_spans,
|
// If that ever stops being the case, then the ever initialized
|
||||||
original_path,
|
// flow could be used.
|
||||||
kind,
|
if let Some(StatementKind::Assign(box (place, Rvalue::Use(Operand::Move(move_from))))) =
|
||||||
});
|
self.body.basic_blocks[location.block]
|
||||||
|
.statements
|
||||||
|
.get(location.statement_index)
|
||||||
|
.map(|stmt| &stmt.kind)
|
||||||
|
{
|
||||||
|
if let Some(local) = place.as_local() {
|
||||||
|
let local_decl = &self.body.local_decls[local];
|
||||||
|
// opt_match_place is the
|
||||||
|
// match_span is the span of the expression being matched on
|
||||||
|
// match *x.y { ... } match_place is Some(*x.y)
|
||||||
|
// ^^^^ match_span is the span of *x.y
|
||||||
|
//
|
||||||
|
// opt_match_place is None for let [mut] x = ... statements,
|
||||||
|
// whether or not the right-hand side is a place expression
|
||||||
|
if let LocalInfo::User(BindingForm::Var(VarBindingForm {
|
||||||
|
opt_match_place: Some((opt_match_place, match_span)),
|
||||||
|
binding_mode: _,
|
||||||
|
opt_ty_info: _,
|
||||||
|
pat_span: _,
|
||||||
|
})) = *local_decl.local_info()
|
||||||
|
{
|
||||||
|
let stmt_source_info = self.body.source_info(location);
|
||||||
|
self.append_binding_error(
|
||||||
|
grouped_errors,
|
||||||
|
kind,
|
||||||
|
original_path,
|
||||||
|
*move_from,
|
||||||
|
local,
|
||||||
|
opt_match_place,
|
||||||
|
match_span,
|
||||||
|
stmt_source_info.span,
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let move_spans = self.move_spans(original_path.as_ref(), location);
|
||||||
|
grouped_errors.push(GroupedMoveError::OtherIllegalMove {
|
||||||
|
use_spans: move_spans,
|
||||||
|
original_path,
|
||||||
|
kind,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
fn append_binding_error(
|
fn append_binding_error(
|
||||||
@ -464,6 +488,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
args_span,
|
args_span,
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
self.add_note_for_packed_struct_derive(err, original_path.local);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -570,4 +596,20 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Adds an explanatory note if the move error occurs in a derive macro
|
||||||
|
/// expansion of a packed struct.
|
||||||
|
/// Such errors happen because derive macro expansions shy away from taking
|
||||||
|
/// references to the struct's fields since doing so would be undefined behaviour
|
||||||
|
fn add_note_for_packed_struct_derive(&self, err: &mut Diagnostic, local: Local) {
|
||||||
|
let local_place: PlaceRef<'tcx> = local.into();
|
||||||
|
let local_ty = local_place.ty(self.body.local_decls(), self.infcx.tcx).ty.peel_refs();
|
||||||
|
|
||||||
|
if let Some(adt) = local_ty.ty_adt_def()
|
||||||
|
&& adt.repr().packed()
|
||||||
|
&& let ExpnKind::Macro(MacroKind::Derive, name) = self.body.span.ctxt().outer_expn_data().kind
|
||||||
|
{
|
||||||
|
err.note(format!("`#[derive({name})]` triggers a move because taking references to the fields of a packed struct is undefined behaviour"));
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
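To make the note above concrete, a hypothetical packed struct (not from this diff). With `Copy` fields the derive expansion can simply copy them and everything compiles; with a non-`Copy` field the same expansion has to move the field, which is the case this note annotates.

```rust
// References to fields of a `#[repr(packed)]` struct may be misaligned, so
// derive expansions avoid taking them and copy/move the fields instead.
#[derive(Debug, Clone, Copy)]
#[repr(packed)]
struct Header {
    tag: u8,
    len: u32, // not naturally aligned inside the packed layout
}

fn main() {
    let h = Header { tag: 1, len: 42 };
    // Compiles because both fields are `Copy`; replacing `len` with a `String`
    // would make the derived impls move the field and hit the move error
    // (plus the packed-struct note) emitted by the code above.
    println!("{:?}", h);
}
```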
|||||||
@ -9,9 +9,8 @@ use rustc_middle::{
|
|||||||
hir::place::PlaceBase,
|
hir::place::PlaceBase,
|
||||||
mir::{self, BindingForm, Local, LocalDecl, LocalInfo, LocalKind, Location},
|
mir::{self, BindingForm, Local, LocalDecl, LocalInfo, LocalKind, Location},
|
||||||
};
|
};
|
||||||
use rustc_span::source_map::DesugaringKind;
|
|
||||||
use rustc_span::symbol::{kw, Symbol};
|
use rustc_span::symbol::{kw, Symbol};
|
||||||
use rustc_span::{sym, BytePos, Span};
|
use rustc_span::{sym, BytePos, DesugaringKind, Span};
|
||||||
use rustc_target::abi::FieldIdx;
|
use rustc_target::abi::FieldIdx;
|
||||||
|
|
||||||
use crate::diagnostics::BorrowedContentSource;
|
use crate::diagnostics::BorrowedContentSource;
|
||||||
@ -62,7 +61,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
local,
|
local,
|
||||||
projection: [proj_base @ .., ProjectionElem::Field(upvar_index, _)],
|
projection: [proj_base @ .., ProjectionElem::Field(upvar_index, _)],
|
||||||
} => {
|
} => {
|
||||||
debug_assert!(is_closure_or_generator(
|
debug_assert!(is_closure_or_coroutine(
|
||||||
Place::ty_from(local, proj_base, self.body, self.infcx.tcx).ty
|
Place::ty_from(local, proj_base, self.body, self.infcx.tcx).ty
|
||||||
));
|
));
|
||||||
|
|
||||||
@ -122,7 +121,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
{
|
{
|
||||||
item_msg = access_place_desc;
|
item_msg = access_place_desc;
|
||||||
debug_assert!(self.body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty.is_ref());
|
debug_assert!(self.body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty.is_ref());
|
||||||
debug_assert!(is_closure_or_generator(
|
debug_assert!(is_closure_or_coroutine(
|
||||||
the_place_err.ty(self.body, self.infcx.tcx).ty
|
the_place_err.ty(self.body, self.infcx.tcx).ty
|
||||||
));
|
));
|
||||||
|
|
||||||
@ -385,7 +384,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
local,
|
local,
|
||||||
projection: [proj_base @ .., ProjectionElem::Field(upvar_index, _)],
|
projection: [proj_base @ .., ProjectionElem::Field(upvar_index, _)],
|
||||||
} => {
|
} => {
|
||||||
debug_assert!(is_closure_or_generator(
|
debug_assert!(is_closure_or_coroutine(
|
||||||
Place::ty_from(local, proj_base, self.body, self.infcx.tcx).ty
|
Place::ty_from(local, proj_base, self.body, self.infcx.tcx).ty
|
||||||
));
|
));
|
||||||
|
|
||||||
@ -396,17 +395,16 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
let upvar_hir_id = captured_place.get_root_variable();
|
let upvar_hir_id = captured_place.get_root_variable();
|
||||||
|
|
||||||
if let Some(Node::Pat(pat)) = self.infcx.tcx.hir().find(upvar_hir_id)
|
if let Some(Node::Pat(pat)) = self.infcx.tcx.hir().find(upvar_hir_id)
|
||||||
&& let hir::PatKind::Binding(
|
&& let hir::PatKind::Binding(hir::BindingAnnotation::NONE, _, upvar_ident, _) =
|
||||||
hir::BindingAnnotation::NONE,
|
pat.kind
|
||||||
_,
|
|
||||||
upvar_ident,
|
|
||||||
_,
|
|
||||||
) = pat.kind
|
|
||||||
{
|
{
|
||||||
if upvar_ident.name == kw::SelfLower {
|
if upvar_ident.name == kw::SelfLower {
|
||||||
for (_, node) in self.infcx.tcx.hir().parent_iter(upvar_hir_id) {
|
for (_, node) in self.infcx.tcx.hir().parent_iter(upvar_hir_id) {
|
||||||
if let Some(fn_decl) = node.fn_decl() {
|
if let Some(fn_decl) = node.fn_decl() {
|
||||||
if !matches!(fn_decl.implicit_self, hir::ImplicitSelfKind::ImmRef | hir::ImplicitSelfKind::MutRef) {
|
if !matches!(
|
||||||
|
fn_decl.implicit_self,
|
||||||
|
hir::ImplicitSelfKind::ImmRef | hir::ImplicitSelfKind::MutRef
|
||||||
|
) {
|
||||||
err.span_suggestion(
|
err.span_suggestion(
|
||||||
upvar_ident.span,
|
upvar_ident.span,
|
||||||
"consider changing this to be mutable",
|
"consider changing this to be mutable",
|
||||||
@ -573,7 +571,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
self.ty,
|
self.ty,
|
||||||
),
|
),
|
||||||
vec![
|
vec![
|
||||||
vec![ // val.insert(index, rv);
|
vec![
|
||||||
|
// val.insert(index, rv);
|
||||||
(
|
(
|
||||||
val.span.shrink_to_hi().with_hi(index.span.lo()),
|
val.span.shrink_to_hi().with_hi(index.span.lo()),
|
||||||
".insert(".to_string(),
|
".insert(".to_string(),
|
||||||
@ -584,7 +583,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
),
|
),
|
||||||
(rv.span.shrink_to_hi(), ")".to_string()),
|
(rv.span.shrink_to_hi(), ")".to_string()),
|
||||||
],
|
],
|
||||||
vec![ // val.get_mut(index).map(|v| { *v = rv; });
|
vec![
|
||||||
|
// val.get_mut(index).map(|v| { *v = rv; });
|
||||||
(
|
(
|
||||||
val.span.shrink_to_hi().with_hi(index.span.lo()),
|
val.span.shrink_to_hi().with_hi(index.span.lo()),
|
||||||
".get_mut(".to_string(),
|
".get_mut(".to_string(),
|
||||||
@ -593,12 +593,10 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
index.span.shrink_to_hi().with_hi(place.span.hi()),
|
index.span.shrink_to_hi().with_hi(place.span.hi()),
|
||||||
").map(|val| { *val".to_string(),
|
").map(|val| { *val".to_string(),
|
||||||
),
|
),
|
||||||
(
|
(rv.span.shrink_to_hi(), "; })".to_string()),
|
||||||
rv.span.shrink_to_hi(),
|
|
||||||
"; })".to_string(),
|
|
||||||
),
|
|
||||||
],
|
],
|
||||||
vec![ // let x = val.entry(index).or_insert(rv);
|
vec![
|
||||||
|
// let x = val.entry(index).or_insert(rv);
|
||||||
(val.span.shrink_to_lo(), "let val = ".to_string()),
|
(val.span.shrink_to_lo(), "let val = ".to_string()),
|
||||||
(
|
(
|
||||||
val.span.shrink_to_hi().with_hi(index.span.lo()),
|
val.span.shrink_to_hi().with_hi(index.span.lo()),
|
||||||
@ -747,10 +745,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
&& let Some(body_id) = hir_map.maybe_body_owned_by(local_def_id)
|
&& let Some(body_id) = hir_map.maybe_body_owned_by(local_def_id)
|
||||||
{
|
{
|
||||||
let body = hir_map.body(body_id);
|
let body = hir_map.body(body_id);
|
||||||
let mut v = BindingFinder {
|
let mut v = BindingFinder { span: pat_span, hir_id: None };
|
||||||
span: pat_span,
|
|
||||||
hir_id: None,
|
|
||||||
};
|
|
||||||
v.visit_body(body);
|
v.visit_body(body);
|
||||||
v.hir_id
|
v.hir_id
|
||||||
} else {
|
} else {
|
||||||
@ -766,7 +761,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
pat: hir::Pat { kind: hir::PatKind::Ref(_, _), .. },
|
pat: hir::Pat { kind: hir::PatKind::Ref(_, _), .. },
|
||||||
..
|
..
|
||||||
})) = hir_map.find(hir_id)
|
})) = hir_map.find(hir_id)
|
||||||
&& let Ok(name) = self.infcx.tcx.sess.source_map().span_to_snippet(local_decl.source_info.span)
|
&& let Ok(name) =
|
||||||
|
self.infcx.tcx.sess.source_map().span_to_snippet(local_decl.source_info.span)
|
||||||
{
|
{
|
||||||
err.span_suggestion(
|
err.span_suggestion(
|
||||||
pat_span,
|
pat_span,
|
||||||
@ -879,12 +875,11 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
// `span` corresponds to the expression being iterated, find the `for`-loop desugared
|
// `span` corresponds to the expression being iterated, find the `for`-loop desugared
|
||||||
// expression with that span in order to identify potential fixes when encountering a
|
// expression with that span in order to identify potential fixes when encountering a
|
||||||
// read-only iterator that should be mutable.
|
// read-only iterator that should be mutable.
|
||||||
let mut v = Finder {
|
let mut v = Finder { span, expr: None };
|
||||||
span,
|
|
||||||
expr: None,
|
|
||||||
};
|
|
||||||
v.visit_block(block);
|
v.visit_block(block);
|
||||||
if let Some(expr) = v.expr && let Call(_, [expr]) = expr.kind {
|
if let Some(expr) = v.expr
|
||||||
|
&& let Call(_, [expr]) = expr.kind
|
||||||
|
{
|
||||||
match expr.kind {
|
match expr.kind {
|
||||||
MethodCall(path_segment, _, _, span) => {
|
MethodCall(path_segment, _, _, span) => {
|
||||||
// We have `for _ in iter.read_only_iter()`, try to
|
// We have `for _ in iter.read_only_iter()`, try to
|
||||||
@ -1032,38 +1027,42 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
let source = self.body.source;
|
let source = self.body.source;
|
||||||
let hir = self.infcx.tcx.hir();
|
let hir = self.infcx.tcx.hir();
|
||||||
if let InstanceDef::Item(def_id) = source.instance
|
if let InstanceDef::Item(def_id) = source.instance
|
||||||
&& let Some(Node::Expr(hir::Expr { hir_id, kind, ..})) = hir.get_if_local(def_id)
|
&& let Some(Node::Expr(hir::Expr { hir_id, kind, .. })) = hir.get_if_local(def_id)
|
||||||
&& let ExprKind::Closure(closure) = kind && closure.movability == None
|
&& let ExprKind::Closure(closure) = kind
|
||||||
&& let Some(Node::Expr(expr)) = hir.find_parent(*hir_id) {
|
&& closure.movability == None
|
||||||
let mut cur_expr = expr;
|
&& let Some(Node::Expr(expr)) = hir.find_parent(*hir_id)
|
||||||
while let ExprKind::MethodCall(path_segment, recv, _, _) = cur_expr.kind {
|
{
|
||||||
if path_segment.ident.name == sym::iter {
|
let mut cur_expr = expr;
|
||||||
// check `_ty` has `iter_mut` method
|
while let ExprKind::MethodCall(path_segment, recv, _, _) = cur_expr.kind {
|
||||||
let res = self
|
if path_segment.ident.name == sym::iter {
|
||||||
.infcx
|
// check `_ty` has `iter_mut` method
|
||||||
.tcx
|
let res = self
|
||||||
.typeck(path_segment.hir_id.owner.def_id)
|
.infcx
|
||||||
.type_dependent_def_id(cur_expr.hir_id)
|
.tcx
|
||||||
.and_then(|def_id| self.infcx.tcx.impl_of_method(def_id))
|
.typeck(path_segment.hir_id.owner.def_id)
|
||||||
.map(|def_id| self.infcx.tcx.associated_items(def_id))
|
.type_dependent_def_id(cur_expr.hir_id)
|
||||||
.map(|assoc_items| {
|
.and_then(|def_id| self.infcx.tcx.impl_of_method(def_id))
|
||||||
assoc_items.filter_by_name_unhygienic(sym::iter_mut).peekable()
|
.map(|def_id| self.infcx.tcx.associated_items(def_id))
|
||||||
});
|
.map(|assoc_items| {
|
||||||
|
assoc_items.filter_by_name_unhygienic(sym::iter_mut).peekable()
|
||||||
|
});
|
||||||
|
|
||||||
if let Some(mut res) = res && res.peek().is_some() {
|
if let Some(mut res) = res
|
||||||
err.span_suggestion_verbose(
|
&& res.peek().is_some()
|
||||||
path_segment.ident.span,
|
{
|
||||||
"you may want to use `iter_mut` here",
|
err.span_suggestion_verbose(
|
||||||
"iter_mut",
|
path_segment.ident.span,
|
||||||
Applicability::MaybeIncorrect,
|
"you may want to use `iter_mut` here",
|
||||||
);
|
"iter_mut",
|
||||||
}
|
Applicability::MaybeIncorrect,
|
||||||
break;
|
);
|
||||||
} else {
|
|
||||||
cur_expr = recv;
|
|
||||||
}
|
}
|
||||||
|
break;
|
||||||
|
} else {
|
||||||
|
cur_expr = recv;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
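The `iter_mut` suggestion above targets loops like this hypothetical one (not from the diff), where the loop runs over a read-only iterator but the body needs mutable access:

```rust
fn main() {
    let mut values = vec![1, 2, 3];

    // With `values.iter()` the items are `&i32`, so the assignment is rejected
    // and the diagnostic suggests `iter_mut` instead:
    //
    // for v in values.iter() {
    //     *v += 1; // error[E0594]: cannot assign to `*v`, which is behind a `&` reference
    // }

    // The suggested form:
    for v in values.iter_mut() {
        *v += 1;
    }
    assert_eq!(values, [2, 3, 4]);
}
```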
fn suggest_make_local_mut(
|
fn suggest_make_local_mut(
|
||||||
@ -1200,14 +1199,11 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
}
|
}
|
||||||
let hir_map = self.infcx.tcx.hir();
|
let hir_map = self.infcx.tcx.hir();
|
||||||
let def_id = self.body.source.def_id();
|
let def_id = self.body.source.def_id();
|
||||||
let hir_id = if let Some(local_def_id) = def_id.as_local() &&
|
let hir_id = if let Some(local_def_id) = def_id.as_local()
|
||||||
let Some(body_id) = hir_map.maybe_body_owned_by(local_def_id)
|
&& let Some(body_id) = hir_map.maybe_body_owned_by(local_def_id)
|
||||||
{
|
{
|
||||||
let body = hir_map.body(body_id);
|
let body = hir_map.body(body_id);
|
||||||
let mut v = BindingFinder {
|
let mut v = BindingFinder { span: err_label_span, hir_id: None };
|
||||||
span: err_label_span,
|
|
||||||
hir_id: None,
|
|
||||||
};
|
|
||||||
v.visit_body(body);
|
v.visit_body(body);
|
||||||
v.hir_id
|
v.hir_id
|
||||||
} else {
|
} else {
|
||||||
@ -1215,15 +1211,13 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
};
|
};
|
||||||
|
|
||||||
if let Some(hir_id) = hir_id
|
if let Some(hir_id) = hir_id
|
||||||
&& let Some(hir::Node::Local(local)) = hir_map.find(hir_id)
|
&& let Some(hir::Node::Local(local)) = hir_map.find(hir_id)
|
||||||
{
|
{
|
||||||
let (changing, span, sugg) = match local.ty {
|
let (changing, span, sugg) = match local.ty {
|
||||||
Some(ty) => ("changing", ty.span, message),
|
Some(ty) => ("changing", ty.span, message),
|
||||||
None => (
|
None => {
|
||||||
"specifying",
|
("specifying", local.pat.span.shrink_to_hi(), format!(": {message}"))
|
||||||
local.pat.span.shrink_to_hi(),
|
}
|
||||||
format!(": {message}"),
|
|
||||||
),
|
|
||||||
};
|
};
|
||||||
err.span_suggestion_verbose(
|
err.span_suggestion_verbose(
|
||||||
span,
|
span,
|
||||||
@ -1234,9 +1228,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
} else {
|
} else {
|
||||||
err.span_label(
|
err.span_label(
|
||||||
err_label_span,
|
err_label_span,
|
||||||
format!(
|
format!("consider changing this binding's type to be: `{message}`"),
|
||||||
"consider changing this binding's type to be: `{message}`"
|
|
||||||
),
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1359,9 +1351,9 @@ fn suggest_ampmut<'tcx>(
|
|||||||
None => (false, decl_span),
|
None => (false, decl_span),
|
||||||
};
|
};
|
||||||
|
|
||||||
// if the binding already exists and is a reference with a explicit
|
// if the binding already exists and is a reference with an explicit
|
||||||
// lifetime, then we can suggest adding ` mut`. this is special-cased from
|
// lifetime, then we can suggest adding ` mut`. this is special-cased from
|
||||||
// the path without a explicit lifetime.
|
// the path without an explicit lifetime.
|
||||||
if let Ok(src) = tcx.sess.source_map().span_to_snippet(span)
|
if let Ok(src) = tcx.sess.source_map().span_to_snippet(span)
|
||||||
&& src.starts_with("&'")
|
&& src.starts_with("&'")
|
||||||
// note that `& 'a T` is invalid so this is correct.
|
// note that `& 'a T` is invalid so this is correct.
|
||||||
@ -1380,16 +1372,12 @@ fn suggest_ampmut<'tcx>(
|
|||||||
let ty_mut = decl_ty.builtin_deref(true).unwrap();
|
let ty_mut = decl_ty.builtin_deref(true).unwrap();
|
||||||
assert_eq!(ty_mut.mutbl, hir::Mutability::Not);
|
assert_eq!(ty_mut.mutbl, hir::Mutability::Not);
|
||||||
|
|
||||||
(
|
(false, span, format!("{}mut {}", if decl_ty.is_ref() { "&" } else { "*" }, ty_mut.ty))
|
||||||
false,
|
|
||||||
span,
|
|
||||||
format!("{}mut {}", if decl_ty.is_ref() {"&"} else {"*"}, ty_mut.ty)
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
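A hedged sketch of the explicit-lifetime case handled above (names and types are illustrative, not taken from the diff): when the binding's type already spells out a lifetime, the suggestion inserts ` mut` after it, turning `&'a i32` into `&'a mut i32` instead of rewriting the whole type.

```rust
fn bump<'a>(n: &'a mut i32) {
    // let slot: &'a i32 = n; // with this annotation, `*slot += 1` below is
    //                        // rejected and the suggested fix is `&'a mut i32`
    let slot: &'a mut i32 = n;
    *slot += 1;
}

fn main() {
    let mut x = 0;
    bump(&mut x);
    assert_eq!(x, 1);
}
```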
fn is_closure_or_generator(ty: Ty<'_>) -> bool {
|
fn is_closure_or_coroutine(ty: Ty<'_>) -> bool {
|
||||||
ty.is_closure() || ty.is_generator()
|
ty.is_closure() || ty.is_coroutine()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Given a field that needs to be mutable, returns a span where the " mut " could go.
|
/// Given a field that needs to be mutable, returns a span where the " mut " could go.
|
||||||
|
|||||||
@ -580,7 +580,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
let err = FnMutError {
|
let err = FnMutError {
|
||||||
span: *span,
|
span: *span,
|
||||||
ty_err: match output_ty.kind() {
|
ty_err: match output_ty.kind() {
|
||||||
ty::Generator(def, ..) if self.infcx.tcx.generator_is_async(*def) => {
|
ty::Coroutine(def, ..) if self.infcx.tcx.coroutine_is_async(*def) => {
|
||||||
FnMutReturnTypeErr::ReturnAsyncBlock { span: *span }
|
FnMutReturnTypeErr::ReturnAsyncBlock { span: *span }
|
||||||
}
|
}
|
||||||
_ if output_ty.contains_closure() => {
|
_ if output_ty.contains_closure() => {
|
||||||
@ -1036,7 +1036,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||||||
..
|
..
|
||||||
}) => {
|
}) => {
|
||||||
let body = map.body(*body);
|
let body = map.body(*body);
|
||||||
if !matches!(body.generator_kind, Some(hir::GeneratorKind::Async(..))) {
|
if !matches!(body.coroutine_kind, Some(hir::CoroutineKind::Async(..))) {
|
||||||
closure_span = Some(expr.span.shrink_to_lo());
|
closure_span = Some(expr.span.shrink_to_lo());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -41,7 +41,7 @@ pub(crate) enum RegionNameSource {
|
|||||||
AnonRegionFromUpvar(Span, Symbol),
|
AnonRegionFromUpvar(Span, Symbol),
|
||||||
/// The region corresponding to the return type of a closure.
|
/// The region corresponding to the return type of a closure.
|
||||||
AnonRegionFromOutput(RegionNameHighlight, &'static str),
|
AnonRegionFromOutput(RegionNameHighlight, &'static str),
|
||||||
/// The region from a type yielded by a generator.
|
/// The region from a type yielded by a coroutine.
|
||||||
AnonRegionFromYieldTy(Span, String),
|
AnonRegionFromYieldTy(Span, String),
|
||||||
/// An anonymous region from an async fn.
|
/// An anonymous region from an async fn.
|
||||||
AnonRegionFromAsyncFn(Span),
|
AnonRegionFromAsyncFn(Span),
|
||||||
@ -322,7 +322,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
|||||||
let def_ty = self.regioncx.universal_regions().defining_ty;
|
let def_ty = self.regioncx.universal_regions().defining_ty;
|
||||||
|
|
||||||
let DefiningTy::Closure(_, args) = def_ty else {
|
let DefiningTy::Closure(_, args) = def_ty else {
|
||||||
// Can't have BrEnv in functions, constants or generators.
|
// Can't have BrEnv in functions, constants or coroutines.
|
||||||
bug!("BrEnv outside of closure.");
|
bug!("BrEnv outside of closure.");
|
||||||
};
|
};
|
||||||
let hir::ExprKind::Closure(&hir::Closure { fn_decl_span, .. }) =
|
let hir::ExprKind::Closure(&hir::Closure { fn_decl_span, .. }) =
|
||||||
@ -680,16 +680,16 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
|||||||
}
|
}
|
||||||
hir::FnRetTy::Return(hir_ty) => (fn_decl.output.span(), Some(hir_ty)),
|
hir::FnRetTy::Return(hir_ty) => (fn_decl.output.span(), Some(hir_ty)),
|
||||||
};
|
};
|
||||||
let mir_description = match hir.body(body).generator_kind {
|
let mir_description = match hir.body(body).coroutine_kind {
|
||||||
Some(hir::GeneratorKind::Async(gen)) => match gen {
|
Some(hir::CoroutineKind::Async(gen)) => match gen {
|
||||||
hir::AsyncGeneratorKind::Block => " of async block",
|
hir::CoroutineSource::Block => " of async block",
|
||||||
hir::AsyncGeneratorKind::Closure => " of async closure",
|
hir::CoroutineSource::Closure => " of async closure",
|
||||||
hir::AsyncGeneratorKind::Fn => {
|
hir::CoroutineSource::Fn => {
|
||||||
let parent_item =
|
let parent_item =
|
||||||
hir.get_by_def_id(hir.get_parent_item(mir_hir_id).def_id);
|
hir.get_by_def_id(hir.get_parent_item(mir_hir_id).def_id);
|
||||||
let output = &parent_item
|
let output = &parent_item
|
||||||
.fn_decl()
|
.fn_decl()
|
||||||
.expect("generator lowered from async fn should be in fn")
|
.expect("coroutine lowered from async fn should be in fn")
|
||||||
.output;
|
.output;
|
||||||
span = output.span();
|
span = output.span();
|
||||||
if let hir::FnRetTy::Return(ret) = output {
|
if let hir::FnRetTy::Return(ret) = output {
|
||||||
@ -698,7 +698,21 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
|||||||
" of async function"
|
" of async function"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
Some(hir::GeneratorKind::Gen) => " of generator",
|
Some(hir::CoroutineKind::Gen(gen)) => match gen {
|
||||||
|
hir::CoroutineSource::Block => " of gen block",
|
||||||
|
hir::CoroutineSource::Closure => " of gen closure",
|
||||||
|
hir::CoroutineSource::Fn => {
|
||||||
|
let parent_item =
|
||||||
|
hir.get_by_def_id(hir.get_parent_item(mir_hir_id).def_id);
|
||||||
|
let output = &parent_item
|
||||||
|
.fn_decl()
|
||||||
|
.expect("coroutine lowered from gen fn should be in fn")
|
||||||
|
.output;
|
||||||
|
span = output.span();
|
||||||
|
" of gen function"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Some(hir::CoroutineKind::Coroutine) => " of coroutine",
|
||||||
None => " of closure",
|
None => " of closure",
|
||||||
};
|
};
|
||||||
(span, mir_description, hir_ty)
|
(span, mir_description, hir_ty)
|
||||||
@ -793,7 +807,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
|||||||
&self,
|
&self,
|
||||||
fr: RegionVid,
|
fr: RegionVid,
|
||||||
) -> Option<RegionName> {
|
) -> Option<RegionName> {
|
||||||
// Note: generators from `async fn` yield `()`, so we don't have to
|
// Note: coroutines from `async fn` yield `()`, so we don't have to
|
||||||
// worry about them here.
|
// worry about them here.
|
||||||
let yield_ty = self.regioncx.universal_regions().yield_ty?;
|
let yield_ty = self.regioncx.universal_regions().yield_ty?;
|
||||||
debug!("give_name_if_anonymous_region_appears_in_yield_ty: yield_ty = {:?}", yield_ty);
|
debug!("give_name_if_anonymous_region_appears_in_yield_ty: yield_ty = {:?}", yield_ty);
|
||||||
@ -942,9 +956,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
|||||||
ty::ClauseKind::Projection(data) if data.projection_ty.self_ty() == ty => {}
|
ty::ClauseKind::Projection(data) if data.projection_ty.self_ty() == ty => {}
|
||||||
_ => return false,
|
_ => return false,
|
||||||
}
|
}
|
||||||
tcx.any_free_region_meets(pred, |r| {
|
tcx.any_free_region_meets(pred, |r| *r == ty::ReEarlyBound(region))
|
||||||
*r == ty::ReEarlyBound(region)
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
false
|
false
|
||||||
|
|||||||
@ -6,8 +6,8 @@ use crate::Upvar;
|
|||||||
use rustc_index::IndexSlice;
|
use rustc_index::IndexSlice;
|
||||||
use rustc_middle::mir::{Body, Local};
|
use rustc_middle::mir::{Body, Local};
|
||||||
use rustc_middle::ty::{RegionVid, TyCtxt};
|
use rustc_middle::ty::{RegionVid, TyCtxt};
|
||||||
use rustc_span::source_map::Span;
|
|
||||||
use rustc_span::symbol::Symbol;
|
use rustc_span::symbol::Symbol;
|
||||||
|
use rustc_span::Span;
|
||||||
|
|
||||||
impl<'tcx> RegionInferenceContext<'tcx> {
|
impl<'tcx> RegionInferenceContext<'tcx> {
|
||||||
pub(crate) fn get_var_name_and_span_for_region(
|
pub(crate) fn get_var_name_and_span_for_region(
|
||||||
|
|||||||
@ -41,7 +41,8 @@ pub(crate) trait AllFactsExt {
|
|||||||
impl AllFactsExt for AllFacts {
|
impl AllFactsExt for AllFacts {
|
||||||
/// Return
|
/// Return
|
||||||
fn enabled(tcx: TyCtxt<'_>) -> bool {
|
fn enabled(tcx: TyCtxt<'_>) -> bool {
|
||||||
tcx.sess.opts.unstable_opts.nll_facts || tcx.sess.opts.unstable_opts.polonius
|
tcx.sess.opts.unstable_opts.nll_facts
|
||||||
|
|| tcx.sess.opts.unstable_opts.polonius.is_legacy_enabled()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write_to_dir(
|
fn write_to_dir(
|
||||||
|
|||||||
@ -161,7 +161,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
|
|||||||
}
|
}
|
||||||
TerminatorKind::UnwindResume
|
TerminatorKind::UnwindResume
|
||||||
| TerminatorKind::Return
|
| TerminatorKind::Return
|
||||||
| TerminatorKind::GeneratorDrop => {
|
| TerminatorKind::CoroutineDrop => {
|
||||||
// Invalidate all borrows of local places
|
// Invalidate all borrows of local places
|
||||||
let borrow_set = self.borrow_set;
|
let borrow_set = self.borrow_set;
|
||||||
let start = self.location_table.start_index(location);
|
let start = self.location_table.start_index(location);
|
||||||
|
|||||||
@ -1,5 +1,8 @@
|
|||||||
//! This query borrow-checks the MIR to (further) ensure it is not broken.
|
//! This query borrow-checks the MIR to (further) ensure it is not broken.
|
||||||
|
|
||||||
|
#![allow(internal_features)]
|
||||||
|
#![cfg_attr(not(bootstrap), feature(rustdoc_internals))]
|
||||||
|
#![cfg_attr(not(bootstrap), doc(rust_logo))]
|
||||||
#![feature(associated_type_bounds)]
|
#![feature(associated_type_bounds)]
|
||||||
#![feature(box_patterns)]
|
#![feature(box_patterns)]
|
||||||
#![feature(let_chains)]
|
#![feature(let_chains)]
|
||||||
@ -11,7 +14,6 @@
|
|||||||
#![feature(trusted_step)]
|
#![feature(trusted_step)]
|
||||||
#![feature(try_blocks)]
|
#![feature(try_blocks)]
|
||||||
#![recursion_limit = "256"]
|
#![recursion_limit = "256"]
|
||||||
#![allow(internal_features)]
|
|
||||||
|
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate rustc_middle;
|
extern crate rustc_middle;
|
||||||
@ -29,13 +31,8 @@ use rustc_index::{IndexSlice, IndexVec};
|
|||||||
use rustc_infer::infer::{
|
use rustc_infer::infer::{
|
||||||
InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin, TyCtxtInferExt,
|
InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin, TyCtxtInferExt,
|
||||||
};
|
};
|
||||||
use rustc_middle::mir::{
|
use rustc_middle::mir::tcx::PlaceTy;
|
||||||
traversal, Body, ClearCrossCrate, Local, Location, MutBorrowKind, Mutability,
|
use rustc_middle::mir::*;
|
||||||
NonDivergingIntrinsic, Operand, Place, PlaceElem, PlaceRef, VarDebugInfoContents,
|
|
||||||
};
|
|
||||||
use rustc_middle::mir::{AggregateKind, BasicBlock, BorrowCheckResult, BorrowKind};
|
|
||||||
use rustc_middle::mir::{InlineAsmOperand, Terminator, TerminatorKind};
|
|
||||||
use rustc_middle::mir::{ProjectionElem, Promoted, Rvalue, Statement, StatementKind};
|
|
||||||
use rustc_middle::query::Providers;
|
use rustc_middle::query::Providers;
|
||||||
use rustc_middle::traits::DefiningAnchor;
|
use rustc_middle::traits::DefiningAnchor;
|
||||||
use rustc_middle::ty::{self, CapturedPlace, ParamEnv, RegionVid, TyCtxt};
|
use rustc_middle::ty::{self, CapturedPlace, ParamEnv, RegionVid, TyCtxt};
|
||||||
@ -53,13 +50,13 @@ use rustc_mir_dataflow::impls::{
|
|||||||
EverInitializedPlaces, MaybeInitializedPlaces, MaybeUninitializedPlaces,
|
EverInitializedPlaces, MaybeInitializedPlaces, MaybeUninitializedPlaces,
|
||||||
};
|
};
|
||||||
use rustc_mir_dataflow::move_paths::{InitIndex, MoveOutIndex, MovePathIndex};
|
use rustc_mir_dataflow::move_paths::{InitIndex, MoveOutIndex, MovePathIndex};
|
||||||
use rustc_mir_dataflow::move_paths::{InitLocation, LookupResult, MoveData, MoveError};
|
use rustc_mir_dataflow::move_paths::{InitLocation, LookupResult, MoveData};
|
||||||
use rustc_mir_dataflow::Analysis;
|
use rustc_mir_dataflow::Analysis;
|
||||||
use rustc_mir_dataflow::MoveDataParamEnv;
|
use rustc_mir_dataflow::MoveDataParamEnv;
|
||||||
|
|
||||||
use crate::session_diagnostics::VarNeedNotMut;
|
use crate::session_diagnostics::VarNeedNotMut;
|
||||||
|
|
||||||
use self::diagnostics::{AccessKind, RegionName};
|
use self::diagnostics::{AccessKind, IllegalMoveOriginKind, MoveError, RegionName};
|
||||||
use self::location::LocationTable;
|
use self::location::LocationTable;
|
||||||
use self::prefixes::PrefixSet;
|
use self::prefixes::PrefixSet;
|
||||||
use consumers::{BodyWithBorrowckFacts, ConsumerOptions};
|
use consumers::{BodyWithBorrowckFacts, ConsumerOptions};
|
||||||
@ -173,7 +170,9 @@ fn do_mir_borrowck<'tcx>(
|
|||||||
for var_debug_info in &input_body.var_debug_info {
|
for var_debug_info in &input_body.var_debug_info {
|
||||||
if let VarDebugInfoContents::Place(place) = var_debug_info.value {
|
if let VarDebugInfoContents::Place(place) = var_debug_info.value {
|
||||||
if let Some(local) = place.as_local() {
|
if let Some(local) = place.as_local() {
|
||||||
if let Some(prev_name) = local_names[local] && var_debug_info.name != prev_name {
|
if let Some(prev_name) = local_names[local]
|
||||||
|
&& var_debug_info.name != prev_name
|
||||||
|
{
|
||||||
span_bug!(
|
span_bug!(
|
||||||
var_debug_info.source_info.span,
|
var_debug_info.source_info.span,
|
||||||
"local {:?} has many names (`{}` vs `{}`)",
|
"local {:?} has many names (`{}` vs `{}`)",
|
||||||
@ -220,14 +219,10 @@ fn do_mir_borrowck<'tcx>(
|
|||||||
let location_table_owned = LocationTable::new(body);
|
let location_table_owned = LocationTable::new(body);
|
||||||
let location_table = &location_table_owned;
|
let location_table = &location_table_owned;
|
||||||
|
|
||||||
let (move_data, move_errors): (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>) =
|
let move_data = MoveData::gather_moves(&body, tcx, param_env, |_| true);
|
||||||
match MoveData::gather_moves(&body, tcx, param_env) {
|
let promoted_move_data = promoted
|
||||||
Ok(move_data) => (move_data, Vec::new()),
|
|
||||||
Err((move_data, move_errors)) => (move_data, move_errors),
|
|
||||||
};
|
|
||||||
let promoted_errors = promoted
|
|
||||||
.iter_enumerated()
|
.iter_enumerated()
|
||||||
.map(|(idx, body)| (idx, MoveData::gather_moves(&body, tcx, param_env)));
|
.map(|(idx, body)| (idx, MoveData::gather_moves(&body, tcx, param_env, |_| true)));
|
||||||
|
|
||||||
let mdpe = MoveDataParamEnv { move_data, param_env };
|
let mdpe = MoveDataParamEnv { move_data, param_env };
|
||||||
|
|
||||||
@ -298,47 +293,60 @@ fn do_mir_borrowck<'tcx>(
|
|||||||
.pass_name("borrowck")
|
.pass_name("borrowck")
|
||||||
.iterate_to_fixpoint();
|
.iterate_to_fixpoint();
|
||||||
|
|
||||||
let movable_generator =
|
let movable_coroutine =
|
||||||
// The first argument is the generator type passed by value
|
// The first argument is the coroutine type passed by value
|
||||||
if let Some(local) = body.local_decls.raw.get(1)
|
if let Some(local) = body.local_decls.raw.get(1)
|
||||||
// Get the interior types and args which typeck computed
|
// Get the interior types and args which typeck computed
|
||||||
&& let ty::Generator(_, _, hir::Movability::Static) = local.ty.kind()
|
&& let ty::Coroutine(_, _, hir::Movability::Static) = local.ty.kind()
|
||||||
{
|
{
|
||||||
false
|
false
|
||||||
} else {
|
} else {
|
||||||
true
|
true
|
||||||
};
|
};
|
||||||
|
|
||||||
for (idx, move_data_results) in promoted_errors {
|
for (idx, move_data) in promoted_move_data {
|
||||||
let promoted_body = &promoted[idx];
|
use rustc_middle::mir::visit::Visitor;
|
||||||
|
|
||||||
if let Err((move_data, move_errors)) = move_data_results {
|
let promoted_body = &promoted[idx];
|
||||||
let mut promoted_mbcx = MirBorrowckCtxt {
|
let mut promoted_mbcx = MirBorrowckCtxt {
|
||||||
infcx: &infcx,
|
infcx: &infcx,
|
||||||
param_env,
|
param_env,
|
||||||
body: promoted_body,
|
body: promoted_body,
|
||||||
move_data: &move_data,
|
move_data: &move_data,
|
||||||
location_table, // no need to create a real one for the promoted, it is not used
|
location_table, // no need to create a real one for the promoted, it is not used
|
||||||
movable_generator,
|
movable_coroutine,
|
||||||
fn_self_span_reported: Default::default(),
|
fn_self_span_reported: Default::default(),
|
||||||
locals_are_invalidated_at_exit,
|
locals_are_invalidated_at_exit,
|
||||||
access_place_error_reported: Default::default(),
|
access_place_error_reported: Default::default(),
|
||||||
reservation_error_reported: Default::default(),
|
reservation_error_reported: Default::default(),
|
||||||
uninitialized_error_reported: Default::default(),
|
uninitialized_error_reported: Default::default(),
|
||||||
regioncx: regioncx.clone(),
|
regioncx: regioncx.clone(),
|
||||||
used_mut: Default::default(),
|
used_mut: Default::default(),
|
||||||
used_mut_upvars: SmallVec::new(),
|
used_mut_upvars: SmallVec::new(),
|
||||||
borrow_set: Rc::clone(&borrow_set),
|
borrow_set: Rc::clone(&borrow_set),
|
||||||
upvars: Vec::new(),
|
upvars: Vec::new(),
|
||||||
local_names: IndexVec::from_elem(None, &promoted_body.local_decls),
|
local_names: IndexVec::from_elem(None, &promoted_body.local_decls),
|
||||||
region_names: RefCell::default(),
|
region_names: RefCell::default(),
|
||||||
next_region_name: RefCell::new(1),
|
next_region_name: RefCell::new(1),
|
||||||
polonius_output: None,
|
polonius_output: None,
|
||||||
errors,
|
move_errors: Vec::new(),
|
||||||
};
|
errors,
|
||||||
promoted_mbcx.report_move_errors(move_errors);
|
|
||||||
errors = promoted_mbcx.errors;
|
|
||||||
};
|
};
|
||||||
|
MoveVisitor { ctxt: &mut promoted_mbcx }.visit_body(promoted_body);
|
||||||
|
promoted_mbcx.report_move_errors();
|
||||||
|
errors = promoted_mbcx.errors;
|
||||||
|
|
||||||
|
struct MoveVisitor<'a, 'cx, 'tcx> {
|
||||||
|
ctxt: &'a mut MirBorrowckCtxt<'cx, 'tcx>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'tcx> Visitor<'tcx> for MoveVisitor<'_, '_, 'tcx> {
|
||||||
|
fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
|
||||||
|
if let Operand::Move(place) = operand {
|
||||||
|
self.ctxt.check_movable_place(location, *place);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut mbcx = MirBorrowckCtxt {
|
let mut mbcx = MirBorrowckCtxt {
|
||||||
@ -347,7 +355,7 @@ fn do_mir_borrowck<'tcx>(
|
|||||||
body,
|
body,
|
||||||
move_data: &mdpe.move_data,
|
move_data: &mdpe.move_data,
|
||||||
location_table,
|
location_table,
|
||||||
movable_generator,
|
movable_coroutine,
|
||||||
locals_are_invalidated_at_exit,
|
locals_are_invalidated_at_exit,
|
||||||
fn_self_span_reported: Default::default(),
|
fn_self_span_reported: Default::default(),
|
||||||
access_place_error_reported: Default::default(),
|
access_place_error_reported: Default::default(),
|
||||||
@ -362,6 +370,7 @@ fn do_mir_borrowck<'tcx>(
|
|||||||
region_names: RefCell::default(),
|
region_names: RefCell::default(),
|
||||||
next_region_name: RefCell::new(1),
|
next_region_name: RefCell::new(1),
|
||||||
polonius_output,
|
polonius_output,
|
||||||
|
move_errors: Vec::new(),
|
||||||
errors,
|
errors,
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -374,8 +383,6 @@ fn do_mir_borrowck<'tcx>(
|
|||||||
borrows: flow_borrows,
|
borrows: flow_borrows,
|
||||||
};
|
};
|
||||||
|
|
||||||
mbcx.report_move_errors(move_errors);
|
|
||||||
|
|
||||||
rustc_mir_dataflow::visit_results(
|
rustc_mir_dataflow::visit_results(
|
||||||
body,
|
body,
|
||||||
traversal::reverse_postorder(body).map(|(bb, _)| bb),
|
traversal::reverse_postorder(body).map(|(bb, _)| bb),
|
||||||
@ -383,6 +390,8 @@ fn do_mir_borrowck<'tcx>(
|
|||||||
&mut mbcx,
|
&mut mbcx,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
mbcx.report_move_errors();
|
||||||
|
|
||||||
// For each non-user used mutable variable, check if it's been assigned from
|
// For each non-user used mutable variable, check if it's been assigned from
|
||||||
// a user-declared local. If so, then put that local into the used_mut set.
|
// a user-declared local. If so, then put that local into the used_mut set.
|
||||||
// Note that this set is expected to be small - only upvars from closures
|
// Note that this set is expected to be small - only upvars from closures
|
||||||
@ -532,7 +541,7 @@ struct MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
/// when MIR borrowck begins.
|
/// when MIR borrowck begins.
|
||||||
location_table: &'cx LocationTable,
|
location_table: &'cx LocationTable,
|
||||||
|
|
||||||
movable_generator: bool,
|
movable_coroutine: bool,
|
||||||
/// This keeps track of whether local variables are free-ed when the function
|
/// This keeps track of whether local variables are free-ed when the function
|
||||||
/// exits even without a `StorageDead`, which appears to be the case for
|
/// exits even without a `StorageDead`, which appears to be the case for
|
||||||
/// constants.
|
/// constants.
|
||||||
@ -591,6 +600,7 @@ struct MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
polonius_output: Option<Rc<PoloniusOutput>>,
|
polonius_output: Option<Rc<PoloniusOutput>>,
|
||||||
|
|
||||||
errors: error::BorrowckErrors<'tcx>,
|
errors: error::BorrowckErrors<'tcx>,
|
||||||
|
move_errors: Vec<MoveError<'tcx>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check that:
|
// Check that:
|
||||||
@ -721,7 +731,6 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
|
|||||||
}
|
}
|
||||||
TerminatorKind::Assert { cond, expected: _, msg, target: _, unwind: _ } => {
|
TerminatorKind::Assert { cond, expected: _, msg, target: _, unwind: _ } => {
|
||||||
self.consume_operand(loc, (cond, span), flow_state);
|
self.consume_operand(loc, (cond, span), flow_state);
|
||||||
use rustc_middle::mir::AssertKind;
|
|
||||||
if let AssertKind::BoundsCheck { len, index } = &**msg {
|
if let AssertKind::BoundsCheck { len, index } = &**msg {
|
||||||
self.consume_operand(loc, (len, span), flow_state);
|
self.consume_operand(loc, (len, span), flow_state);
|
||||||
self.consume_operand(loc, (index, span), flow_state);
|
self.consume_operand(loc, (index, span), flow_state);
|
||||||
@ -774,7 +783,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
|
|||||||
| TerminatorKind::Unreachable
|
| TerminatorKind::Unreachable
|
||||||
| TerminatorKind::UnwindResume
|
| TerminatorKind::UnwindResume
|
||||||
| TerminatorKind::Return
|
| TerminatorKind::Return
|
||||||
| TerminatorKind::GeneratorDrop
|
| TerminatorKind::CoroutineDrop
|
||||||
| TerminatorKind::FalseEdge { real_target: _, imaginary_target: _ }
|
| TerminatorKind::FalseEdge { real_target: _, imaginary_target: _ }
|
||||||
| TerminatorKind::FalseUnwind { real_target: _, unwind: _ } => {
|
| TerminatorKind::FalseUnwind { real_target: _, unwind: _ } => {
|
||||||
// no data used, thus irrelevant to borrowck
|
// no data used, thus irrelevant to borrowck
|
||||||
@ -793,7 +802,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
|
|||||||
|
|
||||||
match term.kind {
|
match term.kind {
|
||||||
TerminatorKind::Yield { value: _, resume: _, resume_arg: _, drop: _ } => {
|
TerminatorKind::Yield { value: _, resume: _, resume_arg: _, drop: _ } => {
|
||||||
if self.movable_generator {
|
if self.movable_coroutine {
|
||||||
// Look for any active borrows to locals
|
// Look for any active borrows to locals
|
||||||
let borrow_set = self.borrow_set.clone();
|
let borrow_set = self.borrow_set.clone();
|
||||||
for i in flow_state.borrows.iter() {
|
for i in flow_state.borrows.iter() {
|
||||||
@ -805,7 +814,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
|
|||||||
|
|
||||||
TerminatorKind::UnwindResume
|
TerminatorKind::UnwindResume
|
||||||
| TerminatorKind::Return
|
| TerminatorKind::Return
|
||||||
| TerminatorKind::GeneratorDrop => {
|
| TerminatorKind::CoroutineDrop => {
|
||||||
// Returning from the function implicitly kills storage for all locals and statics.
|
// Returning from the function implicitly kills storage for all locals and statics.
|
||||||
// Often, the storage will already have been killed by an explicit
|
// Often, the storage will already have been killed by an explicit
|
||||||
// StorageDead, but we don't always emit those (notably on unwind paths),
|
// StorageDead, but we don't always emit those (notably on unwind paths),
|
||||||
@ -1322,7 +1331,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
// moved into the closure and subsequently used by the closure,
|
// moved into the closure and subsequently used by the closure,
|
||||||
// in order to populate our used_mut set.
|
// in order to populate our used_mut set.
|
||||||
match **aggregate_kind {
|
match **aggregate_kind {
|
||||||
AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) => {
|
AggregateKind::Closure(def_id, _) | AggregateKind::Coroutine(def_id, _, _) => {
|
||||||
let def_id = def_id.expect_local();
|
let def_id = def_id.expect_local();
|
||||||
let BorrowCheckResult { used_mut_upvars, .. } =
|
let BorrowCheckResult { used_mut_upvars, .. } =
|
||||||
self.infcx.tcx.mir_borrowck(def_id);
|
self.infcx.tcx.mir_borrowck(def_id);
|
||||||
@ -1405,7 +1414,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
// As such we have to search for the local that this
|
// As such we have to search for the local that this
|
||||||
// capture comes from and mark it as being used as mut.
|
// capture comes from and mark it as being used as mut.
|
||||||
|
|
||||||
let temp_mpi = self.move_data.rev_lookup.find_local(local);
|
let Some(temp_mpi) = self.move_data.rev_lookup.find_local(local) else {
|
||||||
|
bug!("temporary should be tracked");
|
||||||
|
};
|
||||||
let init = if let [init_index] = *self.move_data.init_path_map[temp_mpi] {
|
let init = if let [init_index] = *self.move_data.init_path_map[temp_mpi] {
|
||||||
&self.move_data.inits[init_index]
|
&self.move_data.inits[init_index]
|
||||||
} else {
|
} else {
|
||||||
@ -1465,6 +1476,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
Operand::Move(place) => {
|
Operand::Move(place) => {
|
||||||
|
// Check if moving from this place makes sense.
|
||||||
|
self.check_movable_place(location, place);
|
||||||
|
|
||||||
// move of place: check if this is move of already borrowed path
|
// move of place: check if this is move of already borrowed path
|
||||||
self.access_place(
|
self.access_place(
|
||||||
location,
|
location,
|
||||||
@ -1545,12 +1559,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Reports an error if this is a borrow of local data.
|
/// Reports an error if this is a borrow of local data.
|
||||||
/// This is called for all Yield expressions on movable generators
|
/// This is called for all Yield expressions on movable coroutines
|
||||||
fn check_for_local_borrow(&mut self, borrow: &BorrowData<'tcx>, yield_span: Span) {
|
fn check_for_local_borrow(&mut self, borrow: &BorrowData<'tcx>, yield_span: Span) {
|
||||||
debug!("check_for_local_borrow({:?})", borrow);
|
debug!("check_for_local_borrow({:?})", borrow);
|
||||||
|
|
||||||
if borrow_of_local_data(borrow.borrowed_place) {
|
if borrow_of_local_data(borrow.borrowed_place) {
|
||||||
let err = self.cannot_borrow_across_generator_yield(
|
let err = self.cannot_borrow_across_coroutine_yield(
|
||||||
self.retrieve_borrow_spans(borrow).var_or_use(),
|
self.retrieve_borrow_spans(borrow).var_or_use(),
|
||||||
yield_span,
|
yield_span,
|
||||||
);
|
);
|
||||||
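A nightly-only sketch of the case checked above (assuming the 1.75-era `coroutines` feature gate; the feature name and diagnostic wording are assumptions, not taken from this diff). A movable coroutine may not keep a borrow of its own local alive across a `yield`, while an immovable (`static`) coroutine may.

```rust
#![feature(coroutines)]

fn main() {
    // Movable coroutine: a borrow of `local` may not live across `yield`.
    //
    // let _movable = || {
    //     let local = String::from("local data");
    //     let borrowed = &local;
    //     yield; // expected: "borrow may still be in use when coroutine yields"
    //     println!("{borrowed}");
    // };

    // Immovable (`static`) coroutines are exempt from this check, so the same
    // body is accepted here:
    let _pinned = static || {
        let local = String::from("local data");
        let borrowed = &local;
        yield;
        println!("{borrowed}");
    };
}
```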
@ -1586,6 +1600,131 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn check_movable_place(&mut self, location: Location, place: Place<'tcx>) {
|
||||||
|
use IllegalMoveOriginKind::*;
|
||||||
|
|
||||||
|
let body = self.body;
|
||||||
|
let tcx = self.infcx.tcx;
|
||||||
|
let mut place_ty = PlaceTy::from_ty(body.local_decls[place.local].ty);
|
||||||
|
for (place_ref, elem) in place.iter_projections() {
|
||||||
|
match elem {
|
||||||
|
ProjectionElem::Deref => match place_ty.ty.kind() {
|
||||||
|
ty::Ref(..) | ty::RawPtr(..) => {
|
||||||
|
self.move_errors.push(MoveError::new(
|
||||||
|
place,
|
||||||
|
location,
|
||||||
|
BorrowedContent {
|
||||||
|
target_place: place_ref.project_deeper(&[elem], tcx),
|
||||||
|
},
|
||||||
|
));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
ty::Adt(adt, _) => {
|
||||||
|
if !adt.is_box() {
|
||||||
|
bug!("Adt should be a box type when Place is deref");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ty::Bool
|
||||||
|
| ty::Char
|
||||||
|
| ty::Int(_)
|
||||||
|
| ty::Uint(_)
|
||||||
|
| ty::Float(_)
|
||||||
|
| ty::Foreign(_)
|
||||||
|
| ty::Str
|
||||||
|
| ty::Array(_, _)
|
||||||
|
| ty::Slice(_)
|
||||||
|
| ty::FnDef(_, _)
|
||||||
|
| ty::FnPtr(_)
|
||||||
|
| ty::Dynamic(_, _, _)
|
||||||
|
| ty::Closure(_, _)
|
||||||
|
| ty::Coroutine(_, _, _)
|
||||||
|
| ty::CoroutineWitness(..)
|
||||||
|
| ty::Never
|
||||||
|
| ty::Tuple(_)
|
||||||
|
| ty::Alias(_, _)
|
||||||
|
| ty::Param(_)
|
||||||
|
| ty::Bound(_, _)
|
||||||
|
| ty::Infer(_)
|
||||||
|
| ty::Error(_)
|
||||||
|
| ty::Placeholder(_) => {
|
||||||
|
bug!("When Place is Deref its type shouldn't be {place_ty:#?}")
|
||||||
|
}
|
||||||
|
},
|
||||||
|
ProjectionElem::Field(_, _) => match place_ty.ty.kind() {
|
||||||
|
ty::Adt(adt, _) => {
|
||||||
|
if adt.has_dtor(tcx) {
|
||||||
|
self.move_errors.push(MoveError::new(
|
||||||
|
place,
|
||||||
|
location,
|
||||||
|
InteriorOfTypeWithDestructor { container_ty: place_ty.ty },
|
||||||
|
));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ty::Closure(_, _) | ty::Coroutine(_, _, _) | ty::Tuple(_) => (),
|
||||||
|
ty::Bool
|
||||||
|
| ty::Char
|
||||||
|
| ty::Int(_)
|
||||||
|
| ty::Uint(_)
|
||||||
|
| ty::Float(_)
|
||||||
|
| ty::Foreign(_)
|
||||||
|
| ty::Str
|
||||||
|
| ty::Array(_, _)
|
||||||
|
| ty::Slice(_)
|
||||||
|
| ty::RawPtr(_)
|
||||||
|
| ty::Ref(_, _, _)
|
||||||
|
| ty::FnDef(_, _)
|
||||||
|
| ty::FnPtr(_)
|
||||||
|
| ty::Dynamic(_, _, _)
|
||||||
|
| ty::CoroutineWitness(..)
|
||||||
|
| ty::Never
|
||||||
|
| ty::Alias(_, _)
|
||||||
|
| ty::Param(_)
|
||||||
|
| ty::Bound(_, _)
|
||||||
|
| ty::Infer(_)
|
||||||
|
| ty::Error(_)
|
||||||
|
| ty::Placeholder(_) => bug!(
|
||||||
|
"When Place contains ProjectionElem::Field it's type shouldn't be {place_ty:#?}"
|
||||||
|
),
|
||||||
|
},
|
||||||
|
ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. } => {
|
||||||
|
match place_ty.ty.kind() {
|
||||||
|
ty::Slice(_) => {
|
||||||
|
self.move_errors.push(MoveError::new(
|
||||||
|
place,
|
||||||
|
location,
|
||||||
|
InteriorOfSliceOrArray { ty: place_ty.ty, is_index: false },
|
||||||
|
));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
ty::Array(_, _) => (),
|
||||||
|
_ => bug!("Unexpected type {:#?}", place_ty.ty),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ProjectionElem::Index(_) => match place_ty.ty.kind() {
|
||||||
|
ty::Array(..) | ty::Slice(..) => {
|
||||||
|
self.move_errors.push(MoveError::new(
|
||||||
|
place,
|
||||||
|
location,
|
||||||
|
InteriorOfSliceOrArray { ty: place_ty.ty, is_index: true },
|
||||||
|
));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
_ => bug!("Unexpected type {place_ty:#?}"),
|
||||||
|
},
|
||||||
|
// `OpaqueCast`: only transmutes the type, so no moves there.
|
||||||
|
// `Downcast` : only changes information about a `Place` without moving.
|
||||||
|
// `Subtype` : only transmutes the type, so no moves.
|
||||||
|
// So it's safe to skip these.
|
||||||
|
ProjectionElem::OpaqueCast(_)
|
||||||
|
| ProjectionElem::Subtype(_)
|
||||||
|
| ProjectionElem::Downcast(_, _) => (),
|
||||||
|
}
|
||||||
|
|
||||||
|
place_ty = place_ty.projection_ty(tcx, elem);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
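As a user-level illustration of what the new check accepts and rejects for arrays and slices (hypothetical code, not from the diff): destructuring consumes the whole array and is fine, while moving out of an interior index is rejected.

```rust
fn main() {
    // Accepted: the pattern moves the entire array, and constant-index
    // projections into arrays are treated as movable above.
    let arr = [String::from("a"), String::from("b")];
    let [first, _second] = arr;

    // Rejected: indexing names an interior place of the array.
    let arr2 = [String::from("c")];
    // let s = arr2[0]; // error[E0508]: cannot move out of type `[String; 1]`, a non-copy array
    let s = arr2[0].clone();

    println!("{first} {s}");
}
```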
fn check_if_full_path_is_moved(
|
fn check_if_full_path_is_moved(
|
||||||
&mut self,
|
&mut self,
|
||||||
location: Location,
|
location: Location,
|
||||||
@ -1967,7 +2106,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
Reservation(WriteKind::MutableBorrow(BorrowKind::Mut { kind: mut_borrow_kind }))
|
Reservation(WriteKind::MutableBorrow(BorrowKind::Mut { kind: mut_borrow_kind }))
|
||||||
| Write(WriteKind::MutableBorrow(BorrowKind::Mut { kind: mut_borrow_kind })) => {
|
| Write(WriteKind::MutableBorrow(BorrowKind::Mut { kind: mut_borrow_kind })) => {
|
||||||
let is_local_mutation_allowed = match mut_borrow_kind {
|
let is_local_mutation_allowed = match mut_borrow_kind {
|
||||||
// `ClosureCapture` is used for mutable variable with a immutable binding.
|
// `ClosureCapture` is used for mutable variable with an immutable binding.
|
||||||
// This is only behaviour difference between `ClosureCapture` and mutable borrows.
|
// This is only behaviour difference between `ClosureCapture` and mutable borrows.
|
||||||
MutBorrowKind::ClosureCapture => LocalMutationIsAllowed::Yes,
|
MutBorrowKind::ClosureCapture => LocalMutationIsAllowed::Yes,
|
||||||
MutBorrowKind::Default | MutBorrowKind::TwoPhaseBorrow => {
|
MutBorrowKind::Default | MutBorrowKind::TwoPhaseBorrow => {
|
||||||
@ -2070,7 +2209,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
local: Local,
|
local: Local,
|
||||||
flow_state: &Flows<'cx, 'tcx>,
|
flow_state: &Flows<'cx, 'tcx>,
|
||||||
) -> Option<InitIndex> {
|
) -> Option<InitIndex> {
|
||||||
let mpi = self.move_data.rev_lookup.find_local(local);
|
let mpi = self.move_data.rev_lookup.find_local(local)?;
|
||||||
let ii = &self.move_data.init_path_map[mpi];
|
let ii = &self.move_data.init_path_map[mpi];
|
||||||
ii.into_iter().find(|&&index| flow_state.ever_inits.contains(index)).copied()
|
ii.into_iter().find(|&&index| flow_state.ever_inits.contains(index)).copied()
|
||||||
}
|
}
|
||||||
|
|||||||
@ -169,10 +169,11 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
|
|||||||
upvars: &[Upvar<'tcx>],
|
upvars: &[Upvar<'tcx>],
|
||||||
consumer_options: Option<ConsumerOptions>,
|
consumer_options: Option<ConsumerOptions>,
|
||||||
) -> NllOutput<'tcx> {
|
) -> NllOutput<'tcx> {
|
||||||
|
let is_polonius_legacy_enabled = infcx.tcx.sess.opts.unstable_opts.polonius.is_legacy_enabled();
|
||||||
let polonius_input = consumer_options.map(|c| c.polonius_input()).unwrap_or_default()
|
let polonius_input = consumer_options.map(|c| c.polonius_input()).unwrap_or_default()
|
||||||
|| infcx.tcx.sess.opts.unstable_opts.polonius;
|
|| is_polonius_legacy_enabled;
|
||||||
let polonius_output = consumer_options.map(|c| c.polonius_output()).unwrap_or_default()
|
let polonius_output = consumer_options.map(|c| c.polonius_output()).unwrap_or_default()
|
||||||
|| infcx.tcx.sess.opts.unstable_opts.polonius;
|
|| is_polonius_legacy_enabled;
|
||||||
let mut all_facts =
|
let mut all_facts =
|
||||||
(polonius_input || AllFacts::enabled(infcx.tcx)).then_some(AllFacts::default());
|
(polonius_input || AllFacts::enabled(infcx.tcx)).then_some(AllFacts::default());
|
||||||
|
|
||||||
@ -181,22 +182,26 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
|
|||||||
let elements = &Rc::new(RegionValueElements::new(&body));
|
let elements = &Rc::new(RegionValueElements::new(&body));
|
||||||
|
|
||||||
// Run the MIR type-checker.
|
// Run the MIR type-checker.
|
||||||
let MirTypeckResults { constraints, universal_region_relations, opaque_type_values } =
|
let MirTypeckResults {
|
||||||
type_check::type_check(
|
constraints,
|
||||||
infcx,
|
universal_region_relations,
|
||||||
param_env,
|
opaque_type_values,
|
||||||
body,
|
live_loans,
|
||||||
promoted,
|
} = type_check::type_check(
|
||||||
&universal_regions,
|
infcx,
|
||||||
location_table,
|
param_env,
|
||||||
borrow_set,
|
body,
|
||||||
&mut all_facts,
|
promoted,
|
||||||
flow_inits,
|
&universal_regions,
|
||||||
move_data,
|
location_table,
|
||||||
elements,
|
borrow_set,
|
||||||
upvars,
|
&mut all_facts,
|
||||||
polonius_input,
|
flow_inits,
|
||||||
);
|
move_data,
|
||||||
|
elements,
|
||||||
|
upvars,
|
||||||
|
polonius_input,
|
||||||
|
);
|
||||||
|
|
||||||
if let Some(all_facts) = &mut all_facts {
|
if let Some(all_facts) = &mut all_facts {
|
||||||
let _prof_timer = infcx.tcx.prof.generic_activity("polonius_fact_generation");
|
let _prof_timer = infcx.tcx.prof.generic_activity("polonius_fact_generation");
|
||||||
@ -274,6 +279,7 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
|
|||||||
type_tests,
|
type_tests,
|
||||||
liveness_constraints,
|
liveness_constraints,
|
||||||
elements,
|
elements,
|
||||||
|
live_loans,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Generate various additional constraints.
|
// Generate various additional constraints.
|
||||||
|
|||||||
@@ -137,7 +137,7 @@ pub(super) fn is_active<'tcx>(
 }

 /// Determines if a given borrow is borrowing local data
-/// This is called for all Yield expressions on movable generators
+/// This is called for all Yield expressions on movable coroutines
 pub(super) fn borrow_of_local_data(place: Place<'_>) -> bool {
     // Reborrow of already borrowed data is ignored
     // Any errors will be caught on the initial borrow
@@ -165,7 +165,7 @@ pub(crate) fn is_upvar_field_projection<'tcx>(
     match place_ref.last_projection() {
         Some((place_base, ProjectionElem::Field(field, _ty))) => {
             let base_ty = place_base.ty(body, tcx).ty;
-            if (base_ty.is_closure() || base_ty.is_generator())
+            if (base_ty.is_closure() || base_ty.is_coroutine())
                 && (!by_ref || upvars[field.index()].by_ref)
             {
                 Some(field)
@@ -7,6 +7,7 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_data_structures::graph::scc::Sccs;
 use rustc_errors::Diagnostic;
 use rustc_hir::def_id::CRATE_DEF_ID;
+use rustc_index::bit_set::SparseBitMatrix;
 use rustc_index::{IndexSlice, IndexVec};
 use rustc_infer::infer::outlives::test_type_match;
 use rustc_infer::infer::region_constraints::{GenericKind, VarInfos, VerifyBound, VerifyIfEq};
@@ -21,17 +22,17 @@ use rustc_middle::traits::ObligationCauseCode;
 use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
 use rustc_span::Span;

+use crate::constraints::graph::{self, NormalConstraintGraph, RegionGraph};
+use crate::dataflow::BorrowIndex;
 use crate::{
-    constraints::{
-        graph::NormalConstraintGraph, ConstraintSccIndex, OutlivesConstraint, OutlivesConstraintSet,
-    },
+    constraints::{ConstraintSccIndex, OutlivesConstraint, OutlivesConstraintSet},
     diagnostics::{RegionErrorKind, RegionErrors, UniverseInfo},
     member_constraints::{MemberConstraintSet, NllMemberConstraintIndex},
     nll::PoloniusOutput,
     region_infer::reverse_sccs::ReverseSccGraph,
     region_infer::values::{
-        LivenessValues, PlaceholderIndices, RegionElement, RegionValueElements, RegionValues,
-        ToElementIndex,
+        LivenessValues, PlaceholderIndices, PointIndex, RegionElement, RegionValueElements,
+        RegionValues, ToElementIndex,
     },
     type_check::{free_region_relations::UniversalRegionRelations, Locations},
     universal_regions::UniversalRegions,
@@ -119,6 +120,9 @@ pub struct RegionInferenceContext<'tcx> {
     /// Information about how the universally quantified regions in
     /// scope on this function relate to one another.
     universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>,
+
+    /// The set of loans that are live at a given point in the CFG, when using `-Zpolonius=next`.
+    live_loans: SparseBitMatrix<PointIndex, BorrowIndex>,
 }

 /// Each time that `apply_member_constraint` is successful, it appends
@@ -330,6 +334,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         type_tests: Vec<TypeTest<'tcx>>,
         liveness_constraints: LivenessValues<RegionVid>,
         elements: &Rc<RegionValueElements>,
+        live_loans: SparseBitMatrix<PointIndex, BorrowIndex>,
     ) -> Self {
         debug!("universal_regions: {:#?}", universal_regions);
         debug!("outlives constraints: {:#?}", outlives_constraints);
@@ -383,6 +388,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
             type_tests,
             universal_regions,
             universal_region_relations,
+            live_loans,
         };

         result.init_free_and_bound_regions();
@@ -637,11 +643,12 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         self.scc_universes[scc]
     }

-    /// Once region solving has completed, this function will return
-    /// the member constraints that were applied to the value of a given
-    /// region `r`. See `AppliedMemberConstraint`.
-    pub(crate) fn applied_member_constraints(&self, r: RegionVid) -> &[AppliedMemberConstraint] {
-        let scc = self.constraint_sccs.scc(r);
+    /// Once region solving has completed, this function will return the member constraints that
+    /// were applied to the value of a given SCC `scc`. See `AppliedMemberConstraint`.
+    pub(crate) fn applied_member_constraints(
+        &self,
+        scc: ConstraintSccIndex,
+    ) -> &[AppliedMemberConstraint] {
         binary_search_util::binary_search_slice(
             &self.member_constraints_applied,
             |applied| applied.member_region_scc,
@@ -683,7 +690,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         // In Polonius mode, the errors about missing universal region relations are in the output
         // and need to be emitted or propagated. Otherwise, we need to check whether the
         // constraints were too strong, and if so, emit or propagate those errors.
-        if infcx.tcx.sess.opts.unstable_opts.polonius {
+        if infcx.tcx.sess.opts.unstable_opts.polonius.is_legacy_enabled() {
             self.check_polonius_subset_errors(
                 outlives_requirements.as_mut(),
                 &mut errors_buffer,
@@ -1938,7 +1945,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         // Member constraints can also give rise to `'r: 'x` edges that
         // were not part of the graph initially, so watch out for those.
         // (But they are extremely rare; this loop is very cold.)
-        for constraint in self.applied_member_constraints(r) {
+        for constraint in self.applied_member_constraints(self.constraint_sccs.scc(r)) {
             let p_c = &self.member_constraints[constraint.member_constraint_index];
             let constraint = OutlivesConstraint {
                 sup: r,
@@ -2279,6 +2286,38 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         }
         None
     }
+
+    /// Access to the SCC constraint graph.
+    pub(crate) fn constraint_sccs(&self) -> &Sccs<RegionVid, ConstraintSccIndex> {
+        self.constraint_sccs.as_ref()
+    }
+
+    /// Access to the region graph, built from the outlives constraints.
+    pub(crate) fn region_graph(&self) -> RegionGraph<'_, 'tcx, graph::Normal> {
+        self.constraint_graph.region_graph(&self.constraints, self.universal_regions.fr_static)
+    }
+
+    /// Returns whether the given region is considered live at all points: whether it is a
+    /// placeholder or a free region.
+    pub(crate) fn is_region_live_at_all_points(&self, region: RegionVid) -> bool {
+        // FIXME: there must be a cleaner way to find this information. At least, when
+        // higher-ranked subtyping is abstracted away from the borrowck main path, we'll only
+        // need to check whether this is a universal region.
+        let origin = self.region_definition(region).origin;
+        let live_at_all_points = matches!(
+            origin,
+            NllRegionVariableOrigin::Placeholder(_) | NllRegionVariableOrigin::FreeRegion
+        );
+        live_at_all_points
+    }
+
+    /// Returns whether the `loan_idx` is live at the given `location`: whether its issuing
+    /// region is contained within the type of a variable that is live at this point.
+    /// Note: for now, the sets of live loans is only available when using `-Zpolonius=next`.
+    pub(crate) fn is_loan_live_at(&self, loan_idx: BorrowIndex, location: Location) -> bool {
+        let point = self.liveness_constraints.point_from_location(location);
+        self.live_loans.contains(point, loan_idx)
+    }
 }

 impl<'tcx> RegionDefinition<'tcx> {
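The methods added above expose a per-point view of live loans: a CFG `Location` is mapped to a dense `PointIndex`, and `live_loans` answers whether a given loan is live at that point. The sketch below models the same lookup with std collections only; every name in it is hypothetical and not part of the compiler.

```rust
// Illustrative sketch only: a point-by-loan membership structure, where a CFG
// location is flattened into a dense point index and each point owns a sparse
// row of live loan indices.
use std::collections::{BTreeSet, HashMap};

/// Hypothetical stand-in for `PointIndex`: block and statement flattened densely.
fn point_from_location(block: usize, statement: usize, statements_per_block: usize) -> usize {
    block * statements_per_block + statement
}

#[derive(Default)]
struct LiveLoans {
    /// point index -> set of loan indices live at that point (sparse rows).
    rows: HashMap<usize, BTreeSet<usize>>,
}

impl LiveLoans {
    /// Union a set of loans into the row of one point.
    fn union_row(&mut self, point: usize, loans: &BTreeSet<usize>) {
        self.rows.entry(point).or_default().extend(loans);
    }
    /// Is this loan live at this point?
    fn contains(&self, point: usize, loan: usize) -> bool {
        self.rows.get(&point).is_some_and(|row| row.contains(&loan))
    }
}

fn main() {
    let mut live = LiveLoans::default();
    let loans_of_value: BTreeSet<usize> = [0, 2].into_iter().collect();
    let p = point_from_location(1, 3, 8);
    live.union_row(p, &loans_of_value);
    assert!(live.contains(p, 2));
    assert!(!live.contains(p, 1));
    println!("loan 2 live at point {p}: {}", live.contains(p, 2));
}
```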
@@ -1,5 +1,6 @@
 use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_errors::ErrorGuaranteed;
+use rustc_hir::def::DefKind;
 use rustc_hir::def_id::LocalDefId;
 use rustc_hir::OpaqueTyOrigin;
 use rustc_infer::infer::InferCtxt;
@@ -308,20 +309,19 @@ fn check_opaque_type_well_formed<'tcx>(
         return Ok(definition_ty);
     };
     let param_env = tcx.param_env(def_id);
-    // HACK This bubble is required for this tests to pass:
-    // nested-return-type2-tait2.rs
-    // nested-return-type2-tait3.rs
+
+    let mut parent_def_id = def_id;
+    while tcx.def_kind(parent_def_id) == DefKind::OpaqueTy {
+        parent_def_id = tcx.local_parent(parent_def_id);
+    }
+
     // FIXME(-Ztrait-solver=next): We probably should use `DefiningAnchor::Error`
     // and prepopulate this `InferCtxt` with known opaque values, rather than
     // using the `Bind` anchor here. For now it's fine.
     let infcx = tcx
         .infer_ctxt()
         .with_next_trait_solver(next_trait_solver)
-        .with_opaque_type_inference(if next_trait_solver {
-            DefiningAnchor::Bind(def_id)
-        } else {
-            DefiningAnchor::Bubble
-        })
+        .with_opaque_type_inference(DefiningAnchor::Bind(parent_def_id))
         .build();
     let ocx = ObligationCtxt::new(&infcx);
     let identity_args = GenericArgs::identity_for_item(tcx, def_id);
@@ -361,7 +361,7 @@ fn check_opaque_type_well_formed<'tcx>(
     if errors.is_empty() {
         Ok(definition_ty)
     } else {
-        Err(infcx.err_ctxt().report_fulfillment_errors(&errors))
+        Err(infcx.err_ctxt().report_fulfillment_errors(errors))
     }
 }

@@ -176,6 +176,11 @@ impl<N: Idx> LivenessValues<N> {
     pub(crate) fn region_value_str(&self, r: N) -> String {
         region_value_str(self.get_elements(r).map(RegionElement::Location))
     }
+
+    #[inline]
+    pub(crate) fn point_from_location(&self, location: Location) -> PointIndex {
+        self.elements.point_from_location(location)
+    }
 }

 /// Maps from `ty::PlaceholderRegion` values that are used in the rest of
@@ -81,6 +81,10 @@ impl<'a, 'tcx> MutVisitor<'tcx> for RegionRenumberer<'a, 'tcx> {

     #[instrument(skip(self), level = "debug")]
     fn visit_ty(&mut self, ty: &mut Ty<'tcx>, ty_context: TyContext) {
+        if matches!(ty_context, TyContext::ReturnTy(_)) {
+            // We will renumber the return ty when called again with `TyContext::LocalDecl`
+            return;
+        }
         *ty = self.renumber_regions(*ty, || RegionCtxt::TyContext(ty_context));

         debug!(?ty);
@@ -139,23 +139,23 @@ pub(crate) enum RequireStaticErr {

 #[derive(Subdiagnostic)]
 pub(crate) enum CaptureVarPathUseCause {
-    #[label(borrowck_borrow_due_to_use_generator)]
-    BorrowInGenerator {
+    #[label(borrowck_borrow_due_to_use_coroutine)]
+    BorrowInCoroutine {
         #[primary_span]
         path_span: Span,
     },
-    #[label(borrowck_use_due_to_use_generator)]
-    UseInGenerator {
+    #[label(borrowck_use_due_to_use_coroutine)]
+    UseInCoroutine {
         #[primary_span]
         path_span: Span,
     },
-    #[label(borrowck_assign_due_to_use_generator)]
-    AssignInGenerator {
+    #[label(borrowck_assign_due_to_use_coroutine)]
+    AssignInCoroutine {
         #[primary_span]
         path_span: Span,
     },
-    #[label(borrowck_assign_part_due_to_use_generator)]
-    AssignPartInGenerator {
+    #[label(borrowck_assign_part_due_to_use_coroutine)]
+    AssignPartInCoroutine {
         #[primary_span]
         path_span: Span,
     },
@@ -202,8 +202,8 @@ pub(crate) enum CaptureVarKind {

 #[derive(Subdiagnostic)]
 pub(crate) enum CaptureVarCause {
-    #[label(borrowck_var_borrow_by_use_place_in_generator)]
-    BorrowUsePlaceGenerator {
+    #[label(borrowck_var_borrow_by_use_place_in_coroutine)]
+    BorrowUsePlaceCoroutine {
         is_single_var: bool,
         place: String,
         #[primary_span]
@@ -216,8 +216,8 @@ pub(crate) enum CaptureVarCause {
         #[primary_span]
         var_span: Span,
     },
-    #[label(borrowck_var_borrow_by_use_in_generator)]
-    BorrowUseInGenerator {
+    #[label(borrowck_var_borrow_by_use_in_coroutine)]
+    BorrowUseInCoroutine {
         #[primary_span]
         var_span: Span,
     },
@@ -226,8 +226,8 @@ pub(crate) enum CaptureVarCause {
         #[primary_span]
         var_span: Span,
     },
-    #[label(borrowck_var_move_by_use_in_generator)]
-    MoveUseInGenerator {
+    #[label(borrowck_var_move_by_use_in_coroutine)]
+    MoveUseInCoroutine {
         #[primary_span]
         var_span: Span,
     },
@@ -236,8 +236,8 @@ pub(crate) enum CaptureVarCause {
         #[primary_span]
         var_span: Span,
     },
-    #[label(borrowck_var_first_borrow_by_use_place_in_generator)]
-    FirstBorrowUsePlaceGenerator {
+    #[label(borrowck_var_first_borrow_by_use_place_in_coroutine)]
+    FirstBorrowUsePlaceCoroutine {
         place: String,
         #[primary_span]
         var_span: Span,
@@ -248,8 +248,8 @@ pub(crate) enum CaptureVarCause {
         #[primary_span]
         var_span: Span,
     },
-    #[label(borrowck_var_second_borrow_by_use_place_in_generator)]
-    SecondBorrowUsePlaceGenerator {
+    #[label(borrowck_var_second_borrow_by_use_place_in_coroutine)]
+    SecondBorrowUsePlaceCoroutine {
         place: String,
         #[primary_span]
         var_span: Span,
@@ -266,8 +266,8 @@ pub(crate) enum CaptureVarCause {
         #[primary_span]
         var_span: Span,
     },
-    #[label(borrowck_partial_var_move_by_use_in_generator)]
-    PartialMoveUseInGenerator {
+    #[label(borrowck_partial_var_move_by_use_in_coroutine)]
+    PartialMoveUseInCoroutine {
         #[primary_span]
         var_span: Span,
         is_partial: bool,
@@ -49,7 +49,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         // If the query has created new universes and errors are going to be emitted, register the
         // cause of these new universes for improved diagnostics.
         let universe = self.infcx.universe();
-        if old_universe != universe && let Some(error_info) = error_info {
+        if old_universe != universe
+            && let Some(error_info) = error_info
+        {
             let universe_info = error_info.to_universe_info(old_universe);
             for u in (old_universe + 1)..=universe {
                 self.borrowck_context.constraints.universe_causes.insert(u, universe_info.clone());
@@ -8,7 +8,7 @@ use rustc_infer::infer::InferCtxt;
 use rustc_middle::mir::ConstraintCategory;
 use rustc_middle::traits::query::OutlivesBound;
 use rustc_middle::ty::{self, RegionVid, Ty};
-use rustc_span::{Span, DUMMY_SP};
+use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP};
 use rustc_trait_selection::traits::query::type_op::{self, TypeOp};
 use std::rc::Rc;
 use type_op::TypeOpOutput;
@@ -318,7 +318,8 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
             .param_env
             .and(type_op::implied_outlives_bounds::ImpliedOutlivesBounds { ty })
             .fully_perform(self.infcx, DUMMY_SP)
-            .unwrap_or_else(|_| bug!("failed to compute implied bounds {:?}", ty));
+            .map_err(|_: ErrorGuaranteed| debug!("failed to compute implied bounds {:?}", ty))
+            .ok()?;
         debug!(?bounds, ?constraints);
         self.add_outlives_bounds(bounds);
         constraints
@@ -101,7 +101,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         );

         // We will not have a universal_regions.yield_ty if we yield (by accident)
-        // outside of a generator and return an `impl Trait`, so emit a delay_span_bug
+        // outside of a coroutine and return an `impl Trait`, so emit a delay_span_bug
         // because we don't want to panic in an assert here if we've already got errors.
         if body.yield_ty().is_some() != universal_regions.yield_ty.is_some() {
             self.tcx().sess.delay_span_bug(
@@ -1,10 +1,12 @@
 use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
-use rustc_index::bit_set::HybridBitSet;
+use rustc_data_structures::graph::WithSuccessors;
+use rustc_index::bit_set::{HybridBitSet, SparseBitMatrix};
 use rustc_index::interval::IntervalSet;
 use rustc_infer::infer::canonical::QueryRegionConstraints;
+use rustc_infer::infer::outlives::for_liveness;
 use rustc_middle::mir::{BasicBlock, Body, ConstraintCategory, Local, Location};
 use rustc_middle::traits::query::DropckOutlivesResult;
-use rustc_middle::ty::{Ty, TyCtxt, TypeVisitable, TypeVisitableExt};
+use rustc_middle::ty::{RegionVid, Ty, TyCtxt, TypeVisitable, TypeVisitableExt};
 use rustc_span::DUMMY_SP;
 use rustc_trait_selection::traits::query::type_op::outlives::DropckOutlives;
 use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput};
@@ -14,6 +16,7 @@ use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
 use rustc_mir_dataflow::move_paths::{HasMoveData, MoveData, MovePathIndex};
 use rustc_mir_dataflow::ResultsCursor;

+use crate::dataflow::BorrowIndex;
 use crate::{
     region_infer::values::{self, PointIndex, RegionValueElements},
     type_check::liveness::local_use_map::LocalUseMap,
@@ -50,6 +53,33 @@ pub(super) fn trace<'mir, 'tcx>(

     let local_use_map = &LocalUseMap::build(&relevant_live_locals, elements, body);

+    // When using `-Zpolonius=next`, compute the set of loans that can reach a given region.
+    let num_loans = typeck.borrowck_context.borrow_set.len();
+    let mut inflowing_loans = SparseBitMatrix::new(num_loans);
+    if typeck.tcx().sess.opts.unstable_opts.polonius.is_next_enabled() {
+        let borrowck_context = &typeck.borrowck_context;
+        let borrow_set = &borrowck_context.borrow_set;
+        let constraint_set = &borrowck_context.constraints.outlives_constraints;
+
+        let num_region_vars = typeck.infcx.num_region_vars();
+        let graph = constraint_set.graph(num_region_vars);
+        let region_graph =
+            graph.region_graph(&constraint_set, borrowck_context.universal_regions.fr_static);
+
+        // Traverse each issuing region's constraints, and record the loan as flowing into the
+        // outlived region.
+        for (loan, issuing_region_data) in borrow_set.iter_enumerated() {
+            for succ in region_graph.depth_first_search(issuing_region_data.region) {
+                // We don't need to mention that a loan flows into its issuing region.
+                if succ == issuing_region_data.region {
+                    continue;
+                }
+
+                inflowing_loans.insert(succ, loan);
+            }
+        }
+    };
+
     let cx = LivenessContext {
         typeck,
         body,
@@ -58,6 +88,7 @@ pub(super) fn trace<'mir, 'tcx>(
         local_use_map,
         move_data,
         drop_data: FxIndexMap::default(),
+        inflowing_loans,
     };

     let mut results = LivenessResults::new(cx);
@@ -71,7 +102,7 @@ pub(super) fn trace<'mir, 'tcx>(
         results.dropck_boring_locals(boring_locals);
     }

-/// Contextual state for the type-liveness generator.
+/// Contextual state for the type-liveness coroutine.
 struct LivenessContext<'me, 'typeck, 'flow, 'tcx> {
     /// Current type-checker, giving us our inference context etc.
     typeck: &'me mut TypeChecker<'typeck, 'tcx>,
@@ -95,6 +126,9 @@ struct LivenessContext<'me, 'typeck, 'flow, 'tcx> {
     /// Index indicating where each variable is assigned, used, or
     /// dropped.
     local_use_map: &'me LocalUseMap,
+
+    /// Set of loans that flow into a given region, when using `-Zpolonius=next`.
+    inflowing_loans: SparseBitMatrix<RegionVid, BorrowIndex>,
 }

 struct DropData<'tcx> {
@@ -284,7 +318,7 @@ impl<'me, 'typeck, 'flow, 'tcx> LivenessResults<'me, 'typeck, 'flow, 'tcx> {
     fn compute_drop_live_points_for(&mut self, local: Local) {
         debug!("compute_drop_live_points_for(local={:?})", local);

-        let mpi = self.cx.move_data.rev_lookup.find_local(local);
+        let Some(mpi) = self.cx.move_data.rev_lookup.find_local(local) else { return };
         debug!("compute_drop_live_points_for: mpi = {:?}", mpi);

         // Find the drops where `local` is initialized.
@@ -486,7 +520,13 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
     ) {
         debug!("add_use_live_facts_for(value={:?})", value);

-        Self::make_all_regions_live(self.elements, &mut self.typeck, value, live_at)
+        Self::make_all_regions_live(
+            self.elements,
+            &mut self.typeck,
+            value,
+            live_at,
+            &self.inflowing_loans,
+        );
     }

     /// Some variable with type `live_ty` is "drop live" at `location`
@@ -537,7 +577,13 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
         // All things in the `outlives` array may be touched by
         // the destructor and must be live at this point.
         for &kind in &drop_data.dropck_result.kinds {
-            Self::make_all_regions_live(self.elements, &mut self.typeck, kind, live_at);
+            Self::make_all_regions_live(
+                self.elements,
+                &mut self.typeck,
+                kind,
+                live_at,
+                &self.inflowing_loans,
+            );

             polonius::add_drop_of_var_derefs_origin(&mut self.typeck, dropped_local, &kind);
         }
@@ -548,6 +594,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
         typeck: &mut TypeChecker<'_, 'tcx>,
         value: impl TypeVisitable<TyCtxt<'tcx>>,
         live_at: &IntervalSet<PointIndex>,
+        inflowing_loans: &SparseBitMatrix<RegionVid, BorrowIndex>,
     ) {
         debug!("make_all_regions_live(value={:?})", value);
         debug!(
@@ -555,16 +602,38 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
             values::location_set_str(elements, live_at.iter()),
         );

-        let tcx = typeck.tcx();
-        tcx.for_each_free_region(&value, |live_region| {
-            let live_region_vid =
-                typeck.borrowck_context.universal_regions.to_region_vid(live_region);
-            typeck
-                .borrowck_context
-                .constraints
-                .liveness_constraints
-                .add_elements(live_region_vid, live_at);
+        // When using `-Zpolonius=next`, we want to record the loans that flow into this value's
+        // regions as being live at the given `live_at` points: this will be used to compute the
+        // location where a loan goes out of scope.
+        let num_loans = typeck.borrowck_context.borrow_set.len();
+        let value_loans = &mut HybridBitSet::new_empty(num_loans);
+
+        value.visit_with(&mut for_liveness::FreeRegionsVisitor {
+            tcx: typeck.tcx(),
+            param_env: typeck.param_env,
+            op: |r| {
+                let live_region_vid = typeck.borrowck_context.universal_regions.to_region_vid(r);
+
+                typeck
+                    .borrowck_context
+                    .constraints
+                    .liveness_constraints
+                    .add_elements(live_region_vid, live_at);
+
+                // There can only be inflowing loans for this region when we are using
+                // `-Zpolonius=next`.
+                if let Some(inflowing) = inflowing_loans.row(live_region_vid) {
+                    value_loans.union(inflowing);
+                }
+            },
         });
+
+        // Record the loans reaching the value.
+        if !value_loans.is_empty() {
+            for point in live_at.iter() {
+                typeck.borrowck_context.live_loans.union_row(point, value_loans);
+            }
+        }
     }

     fn compute_drop_data(
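The `trace` hunks above precompute, for each loan, the regions reachable from its issuing region by walking the outlives-constraint graph depth-first, and store them as `inflowing_loans`. Below is a toy, self-contained version of that reachability pass using plain adjacency lists; the names and the example graph are made up for illustration.

```rust
// Illustrative sketch only: record which regions a loan "flows into" by
// traversing the outlives graph from the loan's issuing region.
use std::collections::{HashMap, HashSet};

/// `outlives[r]` lists the regions that `r` flows into (edges of the constraint graph).
fn regions_reached_from(issuing: usize, outlives: &HashMap<usize, Vec<usize>>) -> HashSet<usize> {
    let mut seen = HashSet::new();
    let mut stack = vec![issuing];
    while let Some(r) = stack.pop() {
        if seen.insert(r) {
            if let Some(succs) = outlives.get(&r) {
                stack.extend(succs.iter().copied());
            }
        }
    }
    seen
}

fn main() {
    // Hypothetical constraint graph: '0 -> '1 -> '3 and '0 -> '2.
    let outlives: HashMap<usize, Vec<usize>> =
        [(0, vec![1, 2]), (1, vec![3])].into_iter().collect();

    // Loan 7 is issued in region '0; every region reachable from '0 is "inflowed"
    // by loan 7, except '0 itself (mirroring the `succ == issuing_region` check above).
    let mut inflowing: HashMap<usize, HashSet<usize>> = HashMap::new();
    for region in regions_reached_from(0, &outlives) {
        if region != 0 {
            inflowing.entry(region).or_default().insert(7);
        }
    }
    assert!(inflowing[&3].contains(&7));
    println!("{inflowing:?}");
}
```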
@@ -14,6 +14,7 @@ use rustc_hir as hir;
 use rustc_hir::def::DefKind;
 use rustc_hir::def_id::LocalDefId;
 use rustc_hir::lang_items::LangItem;
+use rustc_index::bit_set::SparseBitMatrix;
 use rustc_index::{IndexSlice, IndexVec};
 use rustc_infer::infer::canonical::QueryRegionConstraints;
 use rustc_infer::infer::outlives::env::RegionBoundPairs;
@@ -50,6 +51,8 @@ use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
 use rustc_mir_dataflow::move_paths::MoveData;
 use rustc_mir_dataflow::ResultsCursor;

+use crate::dataflow::BorrowIndex;
+use crate::region_infer::values::PointIndex;
 use crate::session_diagnostics::{MoveUnsized, SimdShuffleLastConst};
 use crate::{
     borrow_set::BorrowSet,
@@ -163,6 +166,9 @@ pub(crate) fn type_check<'mir, 'tcx>(

     debug!(?normalized_inputs_and_output);

+    // When using `-Zpolonius=next`, liveness will record the set of live loans per point.
+    let mut live_loans = SparseBitMatrix::new(borrow_set.len());
+
     let mut borrowck_context = BorrowCheckContext {
         universal_regions,
         location_table,
@@ -170,6 +176,7 @@ pub(crate) fn type_check<'mir, 'tcx>(
         all_facts,
         constraints: &mut constraints,
         upvars,
+        live_loans: &mut live_loans,
     };

     let mut checker = TypeChecker::new(
@@ -181,11 +188,7 @@ pub(crate) fn type_check<'mir, 'tcx>(
         &mut borrowck_context,
     );

-    // FIXME(-Ztrait-solver=next): A bit dubious that we're only registering
-    // predefined opaques in the typeck root.
-    if infcx.next_trait_solver() && !infcx.tcx.is_typeck_child(body.source.def_id()) {
-        checker.register_predefined_opaques_in_new_solver();
-    }
+    checker.check_user_type_annotations();

     let mut verifier = TypeVerifier::new(&mut checker, promoted);
     verifier.visit_body(&body);
@@ -240,7 +243,7 @@ pub(crate) fn type_check<'mir, 'tcx>(
         })
         .collect();

-    MirTypeckResults { constraints, universal_region_relations, opaque_type_values }
+    MirTypeckResults { constraints, universal_region_relations, opaque_type_values, live_loans }
 }

 fn translate_outlives_facts(typeck: &mut TypeChecker<'_, '_>) {
@@ -664,8 +667,8 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
                 PlaceTy { ty: base_ty, variant_index: Some(index) }
             }
         }
-        // We do not need to handle generators here, because this runs
-        // before the generator transform stage.
+        // We do not need to handle coroutines here, because this runs
+        // before the coroutine transform stage.
         _ => {
             let ty = if let Some(name) = maybe_name {
                 span_mirbug_and_err!(
@@ -767,13 +770,13 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
         let (variant, args) = match base_ty {
             PlaceTy { ty, variant_index: Some(variant_index) } => match *ty.kind() {
                 ty::Adt(adt_def, args) => (adt_def.variant(variant_index), args),
-                ty::Generator(def_id, args, _) => {
-                    let mut variants = args.as_generator().state_tys(def_id, tcx);
+                ty::Coroutine(def_id, args, _) => {
+                    let mut variants = args.as_coroutine().state_tys(def_id, tcx);
                     let Some(mut variant) = variants.nth(variant_index.into()) else {
                         bug!(
-                            "variant_index of generator out of range: {:?}/{:?}",
+                            "variant_index of coroutine out of range: {:?}/{:?}",
                             variant_index,
-                            args.as_generator().state_tys(def_id, tcx).count()
+                            args.as_coroutine().state_tys(def_id, tcx).count()
                         );
                     };
                     return match variant.nth(field.index()) {
@@ -781,7 +784,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
                         None => Err(FieldAccessError::OutOfRange { field_count: variant.count() }),
                     };
                 }
-                _ => bug!("can't have downcast of non-adt non-generator type"),
+                _ => bug!("can't have downcast of non-adt non-coroutine type"),
             },
             PlaceTy { ty, variant_index: None } => match *ty.kind() {
                 ty::Adt(adt_def, args) if !adt_def.is_enum() => {
@@ -795,13 +798,13 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
                         }),
                     };
                 }
-                ty::Generator(_, args, _) => {
+                ty::Coroutine(_, args, _) => {
                     // Only prefix fields (upvars and current state) are
                     // accessible without a variant index.
-                    return match args.as_generator().prefix_tys().get(field.index()) {
+                    return match args.as_coroutine().prefix_tys().get(field.index()) {
                         Some(ty) => Ok(*ty),
                         None => Err(FieldAccessError::OutOfRange {
-                            field_count: args.as_generator().prefix_tys().len(),
+                            field_count: args.as_coroutine().prefix_tys().len(),
                         }),
                     };
                 }
@@ -855,12 +858,21 @@ struct BorrowCheckContext<'a, 'tcx> {
     borrow_set: &'a BorrowSet<'tcx>,
     pub(crate) constraints: &'a mut MirTypeckRegionConstraints<'tcx>,
     upvars: &'a [Upvar<'tcx>],
+
+    /// The set of loans that are live at a given point in the CFG, filled in by `liveness::trace`,
+    /// when using `-Zpolonius=next`.
+    pub(crate) live_loans: &'a mut SparseBitMatrix<PointIndex, BorrowIndex>,
 }

+/// Holder struct for passing results from MIR typeck to the rest of the non-lexical regions
+/// inference computation.
 pub(crate) struct MirTypeckResults<'tcx> {
     pub(crate) constraints: MirTypeckRegionConstraints<'tcx>,
     pub(crate) universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>,
     pub(crate) opaque_type_values: FxIndexMap<OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>>,
+
+    /// The set of loans that are live at a given point in the CFG, when using `-Zpolonius=next`.
+    pub(crate) live_loans: SparseBitMatrix<PointIndex, BorrowIndex>,
 }

 /// A collection of region constraints that must be satisfied for the
@@ -1005,7 +1017,13 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             borrowck_context,
             reported_errors: Default::default(),
         };
-        checker.check_user_type_annotations();
+
+        // FIXME(-Ztrait-solver=next): A bit dubious that we're only registering
+        // predefined opaques in the typeck root.
+        if infcx.next_trait_solver() && !infcx.tcx.is_typeck_child(body.source.def_id()) {
+            checker.register_predefined_opaques_in_new_solver();
+        }
+
         checker
     }

@@ -1335,7 +1353,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             | TerminatorKind::UnwindResume
             | TerminatorKind::UnwindTerminate(_)
             | TerminatorKind::Return
-            | TerminatorKind::GeneratorDrop
+            | TerminatorKind::CoroutineDrop
             | TerminatorKind::Unreachable
             | TerminatorKind::Drop { .. }
             | TerminatorKind::FalseEdge { .. }
@@ -1452,7 +1470,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {

                 let value_ty = value.ty(body, tcx);
                 match body.yield_ty() {
-                    None => span_mirbug!(self, term, "yield in non-generator"),
+                    None => span_mirbug!(self, term, "yield in non-coroutine"),
                     Some(ty) => {
                         if let Err(terr) = self.sub_types(
                             value_ty,
@@ -1624,7 +1642,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             }
             TerminatorKind::UnwindTerminate(_) => {
                 if !is_cleanup {
-                    span_mirbug!(self, block_data, "abort on non-cleanup block!")
+                    span_mirbug!(self, block_data, "terminate on non-cleanup block!")
                 }
             }
             TerminatorKind::Return => {
@@ -1632,9 +1650,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                     span_mirbug!(self, block_data, "return on cleanup block")
                 }
             }
-            TerminatorKind::GeneratorDrop { .. } => {
+            TerminatorKind::CoroutineDrop { .. } => {
                 if is_cleanup {
-                    span_mirbug!(self, block_data, "generator_drop in cleanup block")
+                    span_mirbug!(self, block_data, "coroutine_drop in cleanup block")
                 }
             }
             TerminatorKind::Yield { resume, drop, .. } => {
@@ -1781,14 +1799,14 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                     }),
                 }
             }
-            AggregateKind::Generator(_, args, _) => {
+            AggregateKind::Coroutine(_, args, _) => {
                 // It doesn't make sense to look at a field beyond the prefix;
                 // these require a variant index, and are not initialized in
                 // aggregate rvalues.
-                match args.as_generator().prefix_tys().get(field_index.as_usize()) {
+                match args.as_coroutine().prefix_tys().get(field_index.as_usize()) {
                     Some(ty) => Ok(*ty),
                     None => Err(FieldAccessError::OutOfRange {
-                        field_count: args.as_generator().prefix_tys().len(),
+                        field_count: args.as_coroutine().prefix_tys().len(),
                     }),
                 }
             }
@@ -2381,7 +2399,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                 AggregateKind::Array(_) => None,
                 AggregateKind::Tuple => None,
                 AggregateKind::Closure(_, _) => None,
-                AggregateKind::Generator(_, _, _) => None,
+                AggregateKind::Coroutine(_, _, _) => None,
             },
         }
     }
@@ -2609,7 +2627,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             // desugaring. A closure gets desugared to a struct, and
            // these extra requirements are basically like where
            // clauses on the struct.
-            AggregateKind::Closure(def_id, args) | AggregateKind::Generator(def_id, args, _) => {
+            AggregateKind::Closure(def_id, args) | AggregateKind::Coroutine(def_id, args, _) => {
                 (def_id, self.prove_closure_bounds(tcx, def_id.expect_local(), args, location))
             }

@@ -2657,7 +2675,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {

         let parent_args = match tcx.def_kind(def_id) {
             DefKind::Closure => args.as_closure().parent_args(),
-            DefKind::Generator => args.as_generator().parent_args(),
+            DefKind::Coroutine => args.as_coroutine().parent_args(),
             DefKind::InlineConst => args.as_inline_const().parent_args(),
             other => bug!("unexpected item {:?}", other),
         };
Some files were not shown because too many files have changed in this diff.