Mirror of https://git.proxmox.com/git/rustc (synced 2026-01-06 22:02:20 +00:00)

commit 781aab860e (parent add651ee10)
New upstream version 1.74.1+dfsg1

Cargo.lock (generated, 634 lines changed): file diff suppressed because it is too large.
@@ -43,6 +43,7 @@ members = [
"src/tools/generate-windows-sys",
"src/tools/rustdoc-gui-test",
"src/tools/opt-dist",
+ "src/tools/coverage-dump",
]

exclude = [
RELEASES.md (127 lines changed)
@@ -1,3 +1,125 @@
Version 1.74.1 (2023-12-07)
===========================

- [Resolved spurious STATUS_ACCESS_VIOLATIONs in LLVM](https://github.com/rust-lang/rust/pull/118464)
- [Clarify guarantees for std::mem::discriminant](https://github.com/rust-lang/rust/pull/118006)
- [Fix some subtyping-related regressions](https://github.com/rust-lang/rust/pull/116415)

Version 1.74.0 (2023-11-16)
==========================

<a id="1.74.0-Language"></a>

Language
--------

- [Codify that `std::mem::Discriminant<T>` does not depend on any lifetimes in T](https://github.com/rust-lang/rust/pull/104299/)
- [Replace `private_in_public` lint with `private_interfaces` and `private_bounds` per RFC 2145](https://github.com/rust-lang/rust/pull/113126/)
  Read more in [RFC 2145](https://rust-lang.github.io/rfcs/2145-type-privacy.html).
- [Allow explicit `#[repr(Rust)]`](https://github.com/rust-lang/rust/pull/114201/)
- [closure field capturing: don't depend on alignment of packed fields](https://github.com/rust-lang/rust/pull/115315/)
- [Enable MIR-based drop-tracking for `async` blocks](https://github.com/rust-lang/rust/pull/107421/)

<a id="1.74.0-Compiler"></a>

Compiler
--------

- [stabilize combining +bundle and +whole-archive link modifiers](https://github.com/rust-lang/rust/pull/113301/)
- [Stabilize `PATH` option for `--print KIND=PATH`](https://github.com/rust-lang/rust/pull/114183/)
- [Enable ASAN/LSAN/TSAN for `*-apple-ios-macabi`](https://github.com/rust-lang/rust/pull/115644/)
- [Promote loongarch64-unknown-none* to Tier 2](https://github.com/rust-lang/rust/pull/115368/)
- [Add `i686-pc-windows-gnullvm` as a tier 3 target](https://github.com/rust-lang/rust/pull/115687/)

<a id="1.74.0-Libraries"></a>

Libraries
---------

- [Implement `From<OwnedFd/Handle>` for ChildStdin/out/err](https://github.com/rust-lang/rust/pull/98704/)
- [Implement `From<{&,&mut} [T; N]>` for `Vec<T>` where `T: Clone`](https://github.com/rust-lang/rust/pull/111278/)
- [impl Step for IP addresses](https://github.com/rust-lang/rust/pull/113748/)
- [Implement `From<[T; N]>` for `Rc<[T]>` and `Arc<[T]>`](https://github.com/rust-lang/rust/pull/114041/)
- [`impl TryFrom<char> for u16`](https://github.com/rust-lang/rust/pull/114065/)
- [Stabilize `io_error_other` feature](https://github.com/rust-lang/rust/pull/115453/)
- [Stabilize the `Saturating` type](https://github.com/rust-lang/rust/pull/115477/)
- [Stabilize const_transmute_copy](https://github.com/rust-lang/rust/pull/115520/)

<a id="1.74.0-Stabilized-APIs"></a>

Stabilized APIs
---------------

- [`core::num::Saturating`](https://doc.rust-lang.org/stable/std/num/struct.Saturating.html)
- [`impl From<io::Stdout> for std::process::Stdio`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStdout%3E-for-Stdio)
- [`impl From<io::Stderr> for std::process::Stdio`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStderr%3E-for-Stdio)
- [`impl From<OwnedHandle> for std::process::Child{Stdin, Stdout, Stderr}`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStderr%3E-for-Stdio)
- [`impl From<OwnedFd> for std::process::Child{Stdin, Stdout, Stderr}`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStderr%3E-for-Stdio)
- [`std::ffi::OsString::from_encoded_bytes_unchecked`](https://doc.rust-lang.org/stable/std/ffi/struct.OsString.html#method.from_encoded_bytes_unchecked)
- [`std::ffi::OsString::into_encoded_bytes`](https://doc.rust-lang.org/stable/std/ffi/struct.OsString.html#method.into_encoded_bytes)
- [`std::ffi::OsStr::from_encoded_bytes_unchecked`](https://doc.rust-lang.org/stable/std/ffi/struct.OsStr.html#method.from_encoded_bytes_unchecked)
- [`std::ffi::OsStr::as_encoded_bytes`](https://doc.rust-lang.org/stable/std/ffi/struct.OsStr.html#method.as_encoded_bytes)
- [`std::io::Error::other`](https://doc.rust-lang.org/stable/std/io/struct.Error.html#method.other)
- [`impl TryFrom<char> for u16`](https://doc.rust-lang.org/stable/std/primitive.u16.html#impl-TryFrom%3Cchar%3E-for-u16)
- [`impl<T: Clone, const N: usize> From<&[T; N]> for Vec<T>`](https://doc.rust-lang.org/stable/std/vec/struct.Vec.html#impl-From%3C%26%5BT;+N%5D%3E-for-Vec%3CT,+Global%3E)
- [`impl<T: Clone, const N: usize> From<&mut [T; N]> for Vec<T>`](https://doc.rust-lang.org/stable/std/vec/struct.Vec.html#impl-From%3C%26mut+%5BT;+N%5D%3E-for-Vec%3CT,+Global%3E)
- [`impl<T, const N: usize> From<[T; N]> for Arc<[T]>`](https://doc.rust-lang.org/stable/std/sync/struct.Arc.html#impl-From%3C%5BT;+N%5D%3E-for-Arc%3C%5BT%5D,+Global%3E)
- [`impl<T, const N: usize> From<[T; N]> for Rc<[T]>`](https://doc.rust-lang.org/stable/std/rc/struct.Rc.html#impl-From%3C%5BT;+N%5D%3E-for-Rc%3C%5BT%5D,+Global%3E)

These APIs are now stable in const contexts:

- [`core::mem::transmute_copy`](https://doc.rust-lang.org/beta/std/mem/fn.transmute_copy.html)
- [`str::is_ascii`](https://doc.rust-lang.org/beta/std/primitive.str.html#method.is_ascii)
- [`[u8]::is_ascii`](https://doc.rust-lang.org/beta/std/primitive.slice.html#method.is_ascii)

<a id="1.74.0-Cargo"></a>

Cargo
-----

- [fix: Set MSRV for internal packages](https://github.com/rust-lang/cargo/pull/12381/)
- [config: merge lists in precedence order](https://github.com/rust-lang/cargo/pull/12515/)
- [fix(update): Clarify meaning of --aggressive as --recursive](https://github.com/rust-lang/cargo/pull/12544/)
- [fix(update): Make `-p` more convenient by being positional](https://github.com/rust-lang/cargo/pull/12545/)
- [feat(help): Add styling to help output](https://github.com/rust-lang/cargo/pull/12578/)
- [feat(pkgid): Allow incomplete versions when unambiguous](https://github.com/rust-lang/cargo/pull/12614/)
- [feat: stabilize credential-process and registry-auth](https://github.com/rust-lang/cargo/pull/12649/)
- [feat(cli): Add '-n' to dry-run](https://github.com/rust-lang/cargo/pull/12660/)
- [Add support for `target.'cfg(..)'.linker`](https://github.com/rust-lang/cargo/pull/12535/)
- [Stabilize `--keep-going`](https://github.com/rust-lang/cargo/pull/12568/)
- [feat: Stabilize lints](https://github.com/rust-lang/cargo/pull/12648/)

<a id="1.74.0-Rustdoc"></a>

Rustdoc
-------

- [Add warning block support in rustdoc](https://github.com/rust-lang/rust/pull/106561/)
- [Accept additional user-defined syntax classes in fenced code blocks](https://github.com/rust-lang/rust/pull/110800/)
- [rustdoc-search: add support for type parameters](https://github.com/rust-lang/rust/pull/112725/)
- [rustdoc: show inner enum and struct in type definition for concrete type](https://github.com/rust-lang/rust/pull/114855/)

<a id="1.74.0-Compatibility-Notes"></a>

Compatibility Notes
-------------------

- [Raise minimum supported Apple OS versions](https://github.com/rust-lang/rust/pull/104385/)
- [make Cell::swap panic if the Cells partially overlap](https://github.com/rust-lang/rust/pull/114795/)
- [Reject invalid crate names in `--extern`](https://github.com/rust-lang/rust/pull/116001/)
- [Don't resolve generic impls that may be shadowed by dyn built-in impls](https://github.com/rust-lang/rust/pull/114941/)

<a id="1.74.0-Internal-Changes"></a>

Internal Changes
----------------

These changes do not affect any public interfaces of Rust, but they represent
significant improvements to the performance or internals of rustc and related
tools.

None this cycle.

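A minimal, self-contained sketch exercising a few of the newly stabilized 1.74.0 APIs listed above (`core::num::Saturating`, `std::io::Error::other`, `From<[T; N]> for Arc<[T]>`, `OsStr::as_encoded_bytes`); the example itself is illustrative and not part of the upstream release notes:

```rust
use std::ffi::OsStr;
use std::io;
use std::num::Saturating;
use std::sync::Arc;

fn main() {
    // core::num::Saturating: arithmetic that clamps at the numeric bounds.
    let sum = Saturating(250u8) + Saturating(10u8);
    assert_eq!(sum.0, 255);

    // std::io::Error::other: shorthand for an ErrorKind::Other error with a payload.
    let err = io::Error::other("disk on fire");
    assert_eq!(err.kind(), io::ErrorKind::Other);

    // From<[T; N]> for Arc<[T]>: build a shared slice directly from an array.
    let shared: Arc<[u32]> = Arc::from([1, 2, 3]);
    assert_eq!(shared.len(), 3);

    // OsStr::as_encoded_bytes: borrow the OS string's encoded bytes.
    let bytes = OsStr::new("hello").as_encoded_bytes();
    assert_eq!(bytes, &b"hello"[..]);
}
```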
Version 1.73.0 (2023-10-05)
==========================

@@ -11,7 +133,7 @@ Language
- [Support interpolated block for `try` and `async` in macros.](https://github.com/rust-lang/rust/pull/112953/)
- [Make `unconditional_recursion` lint detect recursive drops.](https://github.com/rust-lang/rust/pull/113902/)
- [Future compatibility warning for some impls being incorrectly considered not overlapping.](https://github.com/rust-lang/rust/pull/114023/)
- [The `invalid_reference_casting` lint is now **deny-by-default** (instead of allow-by-default)](https://github.com/rust-lang/rust/pull/112431)

<a id="1.73.0-Compiler"></a>

@@ -61,7 +183,7 @@ Stabilized APIs
- [`std::ffi::FromBytesUntilNulError`](https://doc.rust-lang.org/stable/std/ffi/struct.FromBytesUntilNulError.html)
- [`std::os::unix::fs::chown`](https://doc.rust-lang.org/stable/std/os/unix/fs/fn.chown.html)
- [`std::os::unix::fs::fchown`](https://doc.rust-lang.org/stable/std/os/unix/fs/fn.fchown.html)
- - [`std::os::unix::fs::lfchown`](https://doc.rust-lang.org/stable/std/os/unix/fs/fn.lchown.html)
+ - [`std::os::unix::fs::lchown`](https://doc.rust-lang.org/stable/std/os/unix/fs/fn.lchown.html)
- [`LocalKey::<Cell<T>>::get`](https://doc.rust-lang.org/stable/std/thread/struct.LocalKey.html#method.get)
- [`LocalKey::<Cell<T>>::set`](https://doc.rust-lang.org/stable/std/thread/struct.LocalKey.html#method.set)
- [`LocalKey::<Cell<T>>::take`](https://doc.rust-lang.org/stable/std/thread/struct.LocalKey.html#method.take)
@@ -229,6 +351,7 @@ Compatibility Notes
this should only impact users of other registries, or people who don't publish
to a registry.
[#12291](https://github.com/rust-lang/cargo/pull/12291)
+ - [Demoted `mips*-unknown-linux-gnu*` targets from host tier 2 to target tier 3 support.](https://github.com/rust-lang/rust/pull/113274)

Version 1.71.1 (2023-08-03)
===========================

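For the `LocalKey::<Cell<T>>` accessors shown in the 1.73.0 context above, a minimal usage sketch (illustrative only) of why they are convenient for simple `Cell`-based thread-locals:

```rust
use std::cell::Cell;

thread_local! {
    static COUNTER: Cell<u32> = Cell::new(0);
}

fn bump() -> u32 {
    // LocalKey::<Cell<T>>::set and ::get avoid the explicit
    // COUNTER.with(|c| ...) closure for simple Cell-based thread-locals.
    COUNTER.set(COUNTER.get() + 1);
    COUNTER.get()
}

fn main() {
    assert_eq!(bump(), 1);
    assert_eq!(bump(), 2);
}
```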
@@ -13,6 +13,7 @@ rustc_codegen_ssa = { path = "../rustc_codegen_ssa" }
# Make sure rustc_smir ends up in the sysroot, because this
# crate is intended to be used by stable MIR consumers, which are not in-tree
rustc_smir = { path = "../rustc_smir" }
+ stable_mir = { path = "../stable_mir" }

[dependencies.jemalloc-sys]
version = "0.5.0"

@@ -157,8 +157,10 @@ pub trait LayoutCalculator {
// for non-ZST uninhabited data (mostly partial initialization).
let absent = |fields: &IndexSlice<FieldIdx, Layout<'_>>| {
let uninhabited = fields.iter().any(|f| f.abi().is_uninhabited());
- let is_zst = fields.iter().all(|f| f.0.is_zst());
- uninhabited && is_zst
+ // We cannot ignore alignment; that might lead us to entirely discard a variant and
+ // produce an enum that is less aligned than it should be!
+ let is_1zst = fields.iter().all(|f| f.0.is_1zst());
+ uninhabited && is_1zst
};
let (present_first, present_second) = {
let mut present_variants = variants
@@ -357,10 +359,8 @@ pub trait LayoutCalculator {
// It'll fit, but we need to make some adjustments.
match layout.fields {
FieldsShape::Arbitrary { ref mut offsets, .. } => {
- for (j, offset) in offsets.iter_enumerated_mut() {
- if !variants[i][j].0.is_zst() {
- *offset += this_offset;
- }
- }
+ for offset in offsets.iter_mut() {
+ *offset += this_offset;
+ }
}
_ => {
@@ -504,7 +504,7 @@ pub trait LayoutCalculator {
// to make room for a larger discriminant.
for field_idx in st.fields.index_by_increasing_offset() {
let field = &field_layouts[FieldIdx::from_usize(field_idx)];
- if !field.0.is_zst() || field.align().abi.bytes() != 1 {
+ if !field.0.is_1zst() {
start_align = start_align.min(field.align().abi);
break;
}
@@ -603,12 +603,15 @@ pub trait LayoutCalculator {
abi = Abi::Scalar(tag);
} else {
// Try to use a ScalarPair for all tagged enums.
+ // That's possible only if we can find a common primitive type for all variants.
let mut common_prim = None;
let mut common_prim_initialized_in_all_variants = true;
for (field_layouts, layout_variant) in iter::zip(variants, &layout_variants) {
let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else {
panic!();
};
+ // We skip *all* ZST here and later check if we are good in terms of alignment.
+ // This lets us handle some cases involving aligned ZST.
let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.0.is_zst());
let (field, offset) = match (fields.next(), fields.next()) {
(None, None) => {
@@ -954,9 +957,6 @@ fn univariant(
};

(
- // Place ZSTs first to avoid "interesting offsets", especially with only one
- // or two non-ZST fields. This helps Scalar/ScalarPair layouts.
- !f.0.is_zst(),
// Then place largest alignments first.
cmp::Reverse(alignment_group_key(f)),
// Then prioritize niche placement within alignment group according to
@@ -1073,9 +1073,10 @@ fn univariant(
let size = min_size.align_to(align.abi);
let mut layout_of_single_non_zst_field = None;
let mut abi = Abi::Aggregate { sized };
- // Unpack newtype ABIs and find scalar pairs.
+ // Try to make this a Scalar/ScalarPair.
if sized && size.bytes() > 0 {
- // All other fields must be ZSTs.
+ // We skip *all* ZST here and later check if we are good in terms of alignment.
+ // This lets us handle some cases involving aligned ZST.
let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.0.is_zst());

match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {

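The layout changes above swap several `is_zst` checks for `is_1zst` because a zero-sized field can still carry an alignment requirement. A small stand-alone sketch of that distinction (type names here are illustrative, not from the compiler):

```rust
use std::mem::{align_of, size_of};

// A zero-sized type with no alignment requirement: a "1-ZST" in the terminology above.
struct Marker;

// A zero-sized type that still demands 4-byte alignment: a ZST, but not a 1-ZST.
#[repr(align(4))]
struct Aligned4;

#[allow(dead_code)]
struct WithMarker(u8, Marker);
#[allow(dead_code)]
struct WithAligned(u8, Aligned4);

fn main() {
    assert_eq!((size_of::<Marker>(), align_of::<Marker>()), (0, 1));
    assert_eq!((size_of::<Aligned4>(), align_of::<Aligned4>()), (0, 4));

    // The 1-ZST really is irrelevant for layout...
    assert_eq!((size_of::<WithMarker>(), align_of::<WithMarker>()), (1, 1));
    // ...but the aligned ZST raises the wrapper's alignment and size, which is why
    // the layout code above must not discard such fields as "just ZSTs".
    assert_eq!((size_of::<WithAligned>(), align_of::<WithAligned>()), (4, 4));
}
```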
@@ -1,5 +1,5 @@
#![cfg_attr(feature = "nightly", feature(step_trait, rustc_attrs, min_specialization))]
- #![cfg_attr(all(not(bootstrap), feature = "nightly"), allow(internal_features))]
+ #![cfg_attr(feature = "nightly", allow(internal_features))]

use std::fmt;
#[cfg(feature = "nightly")]
@@ -1300,12 +1300,18 @@ impl Abi {
matches!(*self, Abi::Uninhabited)
}

- /// Returns `true` is this is a scalar type
+ /// Returns `true` if this is a scalar type
#[inline]
pub fn is_scalar(&self) -> bool {
matches!(*self, Abi::Scalar(_))
}

+ /// Returns `true` if this is a bool
+ #[inline]
+ pub fn is_bool(&self) -> bool {
+ matches!(*self, Abi::Scalar(s) if s.is_bool())
+ }
+
/// Returns the fixed alignment of this ABI, if any is mandated.
pub fn inherent_align<C: HasDataLayout>(&self, cx: &C) -> Option<AbiAndPrefAlign> {
Some(match *self {
@@ -1348,6 +1354,23 @@ impl Abi {
Abi::Uninhabited | Abi::Aggregate { .. } => Abi::Aggregate { sized: true },
}
}
+
+ pub fn eq_up_to_validity(&self, other: &Self) -> bool {
+ match (self, other) {
+ // Scalar, Vector, ScalarPair have `Scalar` in them where we ignore validity ranges.
+ // We do *not* ignore the sign since it matters for some ABIs (e.g. s390x).
+ (Abi::Scalar(l), Abi::Scalar(r)) => l.primitive() == r.primitive(),
+ (
+ Abi::Vector { element: element_l, count: count_l },
+ Abi::Vector { element: element_r, count: count_r },
+ ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
+ (Abi::ScalarPair(l1, l2), Abi::ScalarPair(r1, r2)) => {
+ l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
+ }
+ // Everything else must be strictly identical.
+ _ => self == other,
+ }
+ }
}

#[derive(PartialEq, Eq, Hash, Clone, Debug)]
@@ -1660,15 +1683,25 @@ pub struct PointeeInfo {

impl LayoutS {
/// Returns `true` if the layout corresponds to an unsized type.
+ #[inline]
pub fn is_unsized(&self) -> bool {
self.abi.is_unsized()
}

+ #[inline]
pub fn is_sized(&self) -> bool {
self.abi.is_sized()
}

+ /// Returns `true` if the type is sized and a 1-ZST (meaning it has size 0 and alignment 1).
+ pub fn is_1zst(&self) -> bool {
+ self.is_sized() && self.size.bytes() == 0 && self.align.abi.bytes() == 1
+ }
+
/// Returns `true` if the type is a ZST and not unsized.
+ ///
+ /// Note that this does *not* imply that the type is irrelevant for layout! It can still have
+ /// non-trivial alignment constraints. You probably want to use `is_1zst` instead.
pub fn is_zst(&self) -> bool {
match self.abi {
Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false,
@@ -1676,6 +1709,22 @@ impl LayoutS {
Abi::Aggregate { sized } => sized && self.size.bytes() == 0,
}
}
+
+ /// Checks if these two `Layout` are equal enough to be considered "the same for all function
+ /// call ABIs". Note however that real ABIs depend on more details that are not reflected in the
+ /// `Layout`; the `PassMode` need to be compared as well.
+ pub fn eq_abi(&self, other: &Self) -> bool {
+ // The one thing that we are not capturing here is that for unsized types, the metadata must
+ // also have the same ABI, and moreover that the same metadata leads to the same size. The
+ // 2nd point is quite hard to check though.
+ self.size == other.size
+ && self.is_sized() == other.is_sized()
+ && self.abi.eq_up_to_validity(&other.abi)
+ && self.abi.is_bool() == other.abi.is_bool()
+ && self.align.abi == other.align.abi
+ && self.max_repr_align == other.max_repr_align
+ && self.unadjusted_abi_align == other.unadjusted_abi_align
+ }
}

#[derive(Copy, Clone, Debug)]

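A deliberately simplified, stand-alone model of the `eq_up_to_validity` comparison added above: the enum and struct below are illustrative stand-ins, not the compiler's real `Abi` and `Scalar` types, and the `Vector` case is omitted. Scalars compare equal when their primitive shapes match, ignoring validity ranges; everything else must be strictly identical.

```rust
// Illustrative stand-ins for the compiler's Abi/Scalar types (not the real ones).
#[derive(Clone, Copy, PartialEq, Debug)]
enum Primitive {
    Int { bits: u16, signed: bool },
    Pointer,
}

#[derive(Clone, Copy, PartialEq, Debug)]
struct Scalar {
    primitive: Primitive,
    // Valid range, e.g. 1..=MAX for a NonZero type; ignored by the comparison below.
    valid_range: (u128, u128),
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum Abi {
    Uninhabited,
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
    Aggregate { sized: bool },
}

// Same shape as the comparison above: ignore validity ranges on scalars,
// require strict equality for everything else.
fn eq_up_to_validity(l: &Abi, r: &Abi) -> bool {
    match (l, r) {
        (Abi::Scalar(a), Abi::Scalar(b)) => a.primitive == b.primitive,
        (Abi::ScalarPair(a1, a2), Abi::ScalarPair(b1, b2)) => {
            a1.primitive == b1.primitive && a2.primitive == b2.primitive
        }
        _ => l == r,
    }
}

fn main() {
    let any_u64 = Scalar {
        primitive: Primitive::Int { bits: 64, signed: false },
        valid_range: (0, u64::MAX as u128),
    };
    let nonzero_u64 = Scalar { valid_range: (1, u64::MAX as u128), ..any_u64 };

    // Same primitive, different validity: equal up to validity, but not strictly equal.
    assert!(eq_up_to_validity(&Abi::Scalar(any_u64), &Abi::Scalar(nonzero_u64)));
    assert!(Abi::Scalar(any_u64) != Abi::Scalar(nonzero_u64));
    // Different shapes are never equal up to validity.
    assert!(!eq_up_to_validity(&Abi::Scalar(any_u64), &Abi::Aggregate { sized: true }));
}
```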
@@ -24,7 +24,7 @@
#![deny(unsafe_op_in_unsafe_fn)]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
- #![cfg_attr(not(bootstrap), allow(internal_features))]
+ #![allow(internal_features)]
#![allow(clippy::mut_from_ref)] // Arena allocators are one of the places where this pattern is fine.

use smallvec::SmallVec;
@@ -37,9 +37,10 @@ use std::ptr::{self, NonNull};
use std::slice;
use std::{cmp, intrinsics};

+ /// This calls the passed function while ensuring it won't be inlined into the caller.
#[inline(never)]
#[cold]
- fn cold_path<F: FnOnce() -> R, R>(f: F) -> R {
+ fn outline<F: FnOnce() -> R, R>(f: F) -> R {
f()
}

@@ -600,7 +601,7 @@ impl DroplessArena {
unsafe { self.write_from_iter(iter, len, mem) }
}
(_, _) => {
- cold_path(move || -> &mut [T] {
+ outline(move || -> &mut [T] {
let mut vec: SmallVec<[_; 8]> = iter.collect();
if vec.is_empty() {
return &mut [];

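The rename from `cold_path` to `outline` keeps the same pattern: funnel the rare, bulky branch through a `#[cold]` `#[inline(never)]` helper so the hot caller stays small. A self-contained sketch of the same idea outside the compiler (names and the fallback value are illustrative):

```rust
// The rare, bulky branch goes through a #[cold] #[inline(never)] function so the
// hot caller stays small and inlinable.
#[inline(never)]
#[cold]
fn outline<F: FnOnce() -> R, R>(f: F) -> R {
    f()
}

fn parse_port(input: &str) -> u16 {
    match input.parse() {
        Ok(port) => port, // hot path: stays cheap in the caller
        Err(err) => outline(|| {
            // cold path: formatting and logging are kept out of the hot body
            eprintln!("bad port {input:?} ({err}), falling back to 8080");
            8080
        }),
    }
}

fn main() {
    assert_eq!(parse_port("443"), 443);
    assert_eq!(parse_port("nope"), 8080);
}
```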
@@ -33,7 +33,7 @@ use rustc_macros::HashStable_Generic;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use rustc_span::source_map::{respan, Spanned};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
- use rustc_span::{Span, DUMMY_SP};
+ use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP};
use std::fmt;
use std::mem;
use thin_vec::{thin_vec, ThinVec};
@@ -1426,7 +1426,7 @@ pub enum ExprKind {
/// of `if` / `while` expressions. (e.g., `if let 0 = x { .. }`).
///
/// `Span` represents the whole `let pat = expr` statement.
- Let(P<Pat>, P<Expr>, Span),
+ Let(P<Pat>, P<Expr>, Span, Option<ErrorGuaranteed>),
/// An `if` block, with an optional `else` block.
///
/// `if expr { block } else { expr }`
@@ -2092,6 +2092,10 @@ pub enum TyKind {
Never,
/// A tuple (`(A, B, C, D,...)`).
Tup(ThinVec<P<Ty>>),
+ /// An anonymous struct type i.e. `struct { foo: Type }`
+ AnonStruct(ThinVec<FieldDef>),
+ /// An anonymous union type i.e. `union { bar: Type }`
+ AnonUnion(ThinVec<FieldDef>),
/// A path (`module::module::...::Type`), optionally
/// "qualified", e.g., `<Vec<T> as SomeTrait>::SomeType`.
///

@@ -99,6 +99,22 @@ impl Attribute {
}
}

+ pub fn path_matches(&self, name: &[Symbol]) -> bool {
+ match &self.kind {
+ AttrKind::Normal(normal) => {
+ normal.item.path.segments.len() == name.len()
+ && normal
+ .item
+ .path
+ .segments
+ .iter()
+ .zip(name)
+ .all(|(s, n)| s.args.is_none() && s.ident.name == *n)
+ }
+ AttrKind::DocComment(..) => false,
+ }
+ }
+
pub fn is_word(&self) -> bool {
if let AttrKind::Normal(normal) = &self.kind {
matches!(normal.item.args, AttrArgs::Empty)

@@ -510,6 +510,9 @@ pub fn noop_visit_ty<T: MutVisitor>(ty: &mut P<Ty>, vis: &mut T) {
visit_vec(bounds, |bound| vis.visit_param_bound(bound));
}
TyKind::MacCall(mac) => vis.visit_mac_call(mac),
+ TyKind::AnonStruct(fields) | TyKind::AnonUnion(fields) => {
+ fields.flat_map_in_place(|field| vis.flat_map_field_def(field));
+ }
}
vis.visit_span(span);
visit_lazy_tts(tokens, vis);
@@ -1363,7 +1366,7 @@ pub fn noop_visit_expr<T: MutVisitor>(
vis.visit_ty(ty);
}
ExprKind::AddrOf(_, _, ohs) => vis.visit_expr(ohs),
- ExprKind::Let(pat, scrutinee, _) => {
+ ExprKind::Let(pat, scrutinee, _, _) => {
vis.visit_pat(pat);
vis.visit_expr(scrutinee);
}

@@ -486,6 +486,8 @@ impl Token {
Lt | BinOp(Shl) | // associated path
ModSep => true, // global path
Interpolated(ref nt) => matches!(**nt, NtTy(..) | NtPath(..)),
+ // For anonymous structs or unions, which only appear in specific positions
+ // (type of struct fields or union fields), we don't consider them as regular types
_ => false,
}
}

@@ -213,14 +213,10 @@ impl AttrTokenStream {
.into_iter()
}
AttrTokenTree::Attributes(data) => {
- let mut outer_attrs = Vec::new();
- let mut inner_attrs = Vec::new();
- for attr in &data.attrs {
- match attr.style {
- crate::AttrStyle::Outer => outer_attrs.push(attr),
- crate::AttrStyle::Inner => inner_attrs.push(attr),
- }
- }
+ let idx = data
+ .attrs
+ .partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
+ let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);

let mut target_tokens: Vec<_> = data
.tokens
@@ -265,10 +261,10 @@ impl AttrTokenStream {
"Failed to find trailing delimited group in: {target_tokens:?}"
);
}
- let mut flat: SmallVec<[_; 1]> = SmallVec::new();
+ let mut flat: SmallVec<[_; 1]> =
+ SmallVec::with_capacity(target_tokens.len() + outer_attrs.len());
for attr in outer_attrs {
- // FIXME: Make this more efficient
- flat.extend(attr.tokens().0.clone().iter().cloned());
+ flat.extend(attr.tokens().0.iter().cloned());
}
flat.extend(target_tokens);
flat.into_iter()

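The refactor above relies on `slice::partition_point`, which returns the index of the first element for which the predicate is false, assuming the slice is already partitioned. A small stand-alone example of the same split (illustrative data, not compiler types):

```rust
fn main() {
    // Already partitioned input: every "outer" entry precedes every "inner" entry,
    // mirroring how the attribute list above is ordered by style.
    let styles = ["outer", "outer", "inner", "inner", "inner"];

    // Index of the first element for which the predicate is false.
    let idx = styles.partition_point(|s| *s == "outer");
    assert_eq!(idx, 2);

    let (outer, inner) = styles.split_at(idx);
    assert_eq!(outer.len(), 2);
    assert_eq!(inner.len(), 3);
}
```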
@ -36,7 +36,7 @@ pub fn expr_trailing_brace(mut expr: &ast::Expr) -> Option<&ast::Expr> {
|
||||
| AssignOp(_, _, e)
|
||||
| Binary(_, _, e)
|
||||
| Break(_, Some(e))
|
||||
| Let(_, e, _)
|
||||
| Let(_, e, _, _)
|
||||
| Range(_, Some(e), _)
|
||||
| Ret(Some(e))
|
||||
| Unary(_, e)
|
||||
|
||||
@ -438,6 +438,9 @@ pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) {
|
||||
TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err => {}
|
||||
TyKind::MacCall(mac) => visitor.visit_mac_call(mac),
|
||||
TyKind::Never | TyKind::CVarArgs => {}
|
||||
TyKind::AnonStruct(ref fields, ..) | TyKind::AnonUnion(ref fields, ..) => {
|
||||
walk_list!(visitor, visit_field_def, fields)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -824,7 +827,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
|
||||
visitor.visit_expr(subexpression);
|
||||
visitor.visit_ty(typ)
|
||||
}
|
||||
ExprKind::Let(pat, expr, _) => {
|
||||
ExprKind::Let(pat, expr, _, _) => {
|
||||
visitor.visit_pat(pat);
|
||||
visitor.visit_expr(expr);
|
||||
}
|
||||
|
||||
@ -29,10 +29,6 @@ ast_lowering_bad_return_type_notation_inputs =
|
||||
argument types not allowed with return type notation
|
||||
.suggestion = remove the input types
|
||||
|
||||
ast_lowering_bad_return_type_notation_needs_dots =
|
||||
return type notation arguments must be elided with `..`
|
||||
.suggestion = add `..`
|
||||
|
||||
ast_lowering_bad_return_type_notation_output =
|
||||
return type not allowed with return type notation
|
||||
.suggestion = remove the return type
|
||||
|
||||
@ -152,13 +152,14 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let ohs = self.lower_expr(ohs);
|
||||
hir::ExprKind::AddrOf(*k, *m, ohs)
|
||||
}
|
||||
ExprKind::Let(pat, scrutinee, span) => {
|
||||
ExprKind::Let(pat, scrutinee, span, is_recovered) => {
|
||||
hir::ExprKind::Let(self.arena.alloc(hir::Let {
|
||||
hir_id: self.next_id(),
|
||||
span: self.lower_span(*span),
|
||||
pat: self.lower_pat(pat),
|
||||
ty: None,
|
||||
init: self.lower_expr(scrutinee),
|
||||
is_recovered: *is_recovered,
|
||||
}))
|
||||
}
|
||||
ExprKind::If(cond, then, else_opt) => {
|
||||
@ -558,13 +559,14 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
|
||||
let pat = self.lower_pat(&arm.pat);
|
||||
let guard = arm.guard.as_ref().map(|cond| {
|
||||
if let ExprKind::Let(pat, scrutinee, span) = &cond.kind {
|
||||
if let ExprKind::Let(pat, scrutinee, span, is_recovered) = &cond.kind {
|
||||
hir::Guard::IfLet(self.arena.alloc(hir::Let {
|
||||
hir_id: self.next_id(),
|
||||
span: self.lower_span(*span),
|
||||
pat: self.lower_pat(pat),
|
||||
ty: None,
|
||||
init: self.lower_expr(scrutinee),
|
||||
is_recovered: *is_recovered,
|
||||
}))
|
||||
} else {
|
||||
hir::Guard::If(self.lower_expr(cond))
|
||||
|
||||
@ -2,19 +2,17 @@ use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::sorted_map::SortedMap;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def_id::LocalDefId;
|
||||
use rustc_hir::definitions;
|
||||
use rustc_hir::intravisit::{self, Visitor};
|
||||
use rustc_hir::*;
|
||||
use rustc_index::{Idx, IndexVec};
|
||||
use rustc_middle::span_bug;
|
||||
use rustc_session::Session;
|
||||
use rustc_span::source_map::SourceMap;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_span::{Span, DUMMY_SP};
|
||||
|
||||
/// A visitor that walks over the HIR and collects `Node`s into a HIR map.
|
||||
pub(super) struct NodeCollector<'a, 'hir> {
|
||||
/// Source map
|
||||
source_map: &'a SourceMap,
|
||||
tcx: TyCtxt<'hir>,
|
||||
|
||||
bodies: &'a SortedMap<ItemLocalId, &'hir Body<'hir>>,
|
||||
|
||||
/// Outputs
|
||||
@ -25,14 +23,11 @@ pub(super) struct NodeCollector<'a, 'hir> {
|
||||
parent_node: hir::ItemLocalId,
|
||||
|
||||
owner: OwnerId,
|
||||
|
||||
definitions: &'a definitions::Definitions,
|
||||
}
|
||||
|
||||
#[instrument(level = "debug", skip(sess, definitions, bodies))]
|
||||
#[instrument(level = "debug", skip(tcx, bodies))]
|
||||
pub(super) fn index_hir<'hir>(
|
||||
sess: &Session,
|
||||
definitions: &definitions::Definitions,
|
||||
tcx: TyCtxt<'hir>,
|
||||
item: hir::OwnerNode<'hir>,
|
||||
bodies: &SortedMap<ItemLocalId, &'hir Body<'hir>>,
|
||||
) -> (IndexVec<ItemLocalId, Option<ParentedNode<'hir>>>, FxHashMap<LocalDefId, ItemLocalId>) {
|
||||
@ -42,8 +37,7 @@ pub(super) fn index_hir<'hir>(
|
||||
// used.
|
||||
nodes.push(Some(ParentedNode { parent: ItemLocalId::INVALID, node: item.into() }));
|
||||
let mut collector = NodeCollector {
|
||||
source_map: sess.source_map(),
|
||||
definitions,
|
||||
tcx,
|
||||
owner: item.def_id(),
|
||||
parent_node: ItemLocalId::new(0),
|
||||
nodes,
|
||||
@ -79,11 +73,17 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
|
||||
span,
|
||||
"inconsistent HirId at `{:?}` for `{:?}`: \
|
||||
current_dep_node_owner={} ({:?}), hir_id.owner={} ({:?})",
|
||||
self.source_map.span_to_diagnostic_string(span),
|
||||
self.tcx.sess.source_map().span_to_diagnostic_string(span),
|
||||
node,
|
||||
self.definitions.def_path(self.owner.def_id).to_string_no_crate_verbose(),
|
||||
self.tcx
|
||||
.definitions_untracked()
|
||||
.def_path(self.owner.def_id)
|
||||
.to_string_no_crate_verbose(),
|
||||
self.owner,
|
||||
self.definitions.def_path(hir_id.owner.def_id).to_string_no_crate_verbose(),
|
||||
self.tcx
|
||||
.definitions_untracked()
|
||||
.def_path(hir_id.owner.def_id)
|
||||
.to_string_no_crate_verbose(),
|
||||
hir_id.owner,
|
||||
)
|
||||
}
|
||||
|
||||
@ -1308,7 +1308,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
|
||||
fn lower_asyncness(&mut self, a: Async) -> hir::IsAsync {
|
||||
match a {
|
||||
Async::Yes { .. } => hir::IsAsync::Async,
|
||||
Async::Yes { span, .. } => hir::IsAsync::Async(span),
|
||||
Async::No => hir::IsAsync::NotAsync,
|
||||
}
|
||||
}
|
||||
|
||||
@ -153,6 +153,7 @@ trait ResolverAstLoweringExt {
|
||||
fn get_label_res(&self, id: NodeId) -> Option<NodeId>;
|
||||
fn get_lifetime_res(&self, id: NodeId) -> Option<LifetimeRes>;
|
||||
fn take_extra_lifetime_params(&mut self, id: NodeId) -> Vec<(Ident, NodeId, LifetimeRes)>;
|
||||
fn remap_extra_lifetime_params(&mut self, from: NodeId, to: NodeId);
|
||||
fn decl_macro_kind(&self, def_id: LocalDefId) -> MacroKind;
|
||||
}
|
||||
|
||||
@ -213,6 +214,11 @@ impl ResolverAstLoweringExt for ResolverAstLowering {
|
||||
self.extra_lifetime_params_map.remove(&id).unwrap_or_default()
|
||||
}
|
||||
|
||||
fn remap_extra_lifetime_params(&mut self, from: NodeId, to: NodeId) {
|
||||
let lifetimes = self.extra_lifetime_params_map.remove(&from).unwrap_or_default();
|
||||
self.extra_lifetime_params_map.insert(to, lifetimes);
|
||||
}
|
||||
|
||||
fn decl_macro_kind(&self, def_id: LocalDefId) -> MacroKind {
|
||||
self.builtin_macro_kinds.get(&def_id).copied().unwrap_or(MacroKind::Bang)
|
||||
}
|
||||
@ -236,7 +242,7 @@ enum ImplTraitContext {
|
||||
ReturnPositionOpaqueTy {
|
||||
/// Origin: Either OpaqueTyOrigin::FnReturn or OpaqueTyOrigin::AsyncFn,
|
||||
origin: hir::OpaqueTyOrigin,
|
||||
in_trait: bool,
|
||||
fn_kind: FnDeclKind,
|
||||
},
|
||||
/// Impl trait in type aliases.
|
||||
TypeAliasesOpaqueTy { in_assoc_ty: bool },
|
||||
@ -312,7 +318,7 @@ impl std::fmt::Display for ImplTraitPosition {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
enum FnDeclKind {
|
||||
Fn,
|
||||
Inherent,
|
||||
@ -665,8 +671,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
let (nodes, parenting) =
|
||||
index::index_hir(self.tcx.sess, &*self.tcx.definitions_untracked(), node, &bodies);
|
||||
let (nodes, parenting) = index::index_hir(self.tcx, node, &bodies);
|
||||
let nodes = hir::OwnerNodes { opt_hash_including_bodies, nodes, bodies };
|
||||
let attrs = hir::AttributeMap { map: attrs, opt_hash: attrs_hash };
|
||||
|
||||
@ -765,7 +770,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
/// Intercept all spans entering HIR.
|
||||
/// Mark a span as relative to the current owning item.
|
||||
fn lower_span(&self, span: Span) -> Span {
|
||||
if self.tcx.sess.opts.incremental_relative_spans() {
|
||||
if self.tcx.sess.opts.incremental.is_some() {
|
||||
span.with_parent(Some(self.current_hir_id_owner.def_id))
|
||||
} else {
|
||||
// Do not make spans relative when not using incremental compilation.
|
||||
@ -1089,6 +1094,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
// constructing the HIR for `impl bounds...` and then lowering that.
|
||||
|
||||
let impl_trait_node_id = self.next_node_id();
|
||||
// Shift `impl Trait` lifetime captures from the associated type bound's
|
||||
// node id to the opaque node id, so that the opaque can actually use
|
||||
// these lifetime bounds.
|
||||
self.resolver
|
||||
.remap_extra_lifetime_params(constraint.id, impl_trait_node_id);
|
||||
|
||||
self.with_dyn_type_scope(false, |this| {
|
||||
let node_id = this.next_node_id();
|
||||
@ -1293,6 +1303,18 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
TyKind::Err => {
|
||||
hir::TyKind::Err(self.tcx.sess.delay_span_bug(t.span, "TyKind::Err lowered"))
|
||||
}
|
||||
// FIXME(unnamed_fields): IMPLEMENTATION IN PROGRESS
|
||||
#[allow(rustc::untranslatable_diagnostic)]
|
||||
#[allow(rustc::diagnostic_outside_of_impl)]
|
||||
TyKind::AnonStruct(ref _fields) => hir::TyKind::Err(
|
||||
self.tcx.sess.span_err(t.span, "anonymous structs are unimplemented"),
|
||||
),
|
||||
// FIXME(unnamed_fields): IMPLEMENTATION IN PROGRESS
|
||||
#[allow(rustc::untranslatable_diagnostic)]
|
||||
#[allow(rustc::diagnostic_outside_of_impl)]
|
||||
TyKind::AnonUnion(ref _fields) => hir::TyKind::Err(
|
||||
self.tcx.sess.span_err(t.span, "anonymous unions are unimplemented"),
|
||||
),
|
||||
TyKind::Slice(ty) => hir::TyKind::Slice(self.lower_ty(ty, itctx)),
|
||||
TyKind::Ptr(mt) => hir::TyKind::Ptr(self.lower_mt(mt, itctx)),
|
||||
TyKind::Ref(region, mt) => {
|
||||
@ -1389,13 +1411,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
TyKind::ImplTrait(def_node_id, bounds) => {
|
||||
let span = t.span;
|
||||
match itctx {
|
||||
ImplTraitContext::ReturnPositionOpaqueTy { origin, in_trait } => self
|
||||
ImplTraitContext::ReturnPositionOpaqueTy { origin, fn_kind } => self
|
||||
.lower_opaque_impl_trait(
|
||||
span,
|
||||
*origin,
|
||||
*def_node_id,
|
||||
bounds,
|
||||
*in_trait,
|
||||
Some(*fn_kind),
|
||||
itctx,
|
||||
),
|
||||
&ImplTraitContext::TypeAliasesOpaqueTy { in_assoc_ty } => self
|
||||
@ -1404,17 +1426,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
hir::OpaqueTyOrigin::TyAlias { in_assoc_ty },
|
||||
*def_node_id,
|
||||
bounds,
|
||||
false,
|
||||
None,
|
||||
itctx,
|
||||
),
|
||||
ImplTraitContext::Universal => {
|
||||
let span = t.span;
|
||||
self.create_def(
|
||||
self.current_hir_id_owner.def_id,
|
||||
*def_node_id,
|
||||
DefPathData::ImplTrait,
|
||||
span,
|
||||
);
|
||||
|
||||
// HACK: pprust breaks strings with newlines when the type
|
||||
// gets too long. We don't want these to show up in compiler
|
||||
@ -1425,6 +1441,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
span,
|
||||
);
|
||||
|
||||
self.create_def(
|
||||
self.current_hir_id_owner.def_id,
|
||||
*def_node_id,
|
||||
DefPathData::TypeNs(ident.name),
|
||||
span,
|
||||
);
|
||||
let (param, bounds, path) = self.lower_universal_param_and_bounds(
|
||||
*def_node_id,
|
||||
span,
|
||||
@ -1511,7 +1533,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
origin: hir::OpaqueTyOrigin,
|
||||
opaque_ty_node_id: NodeId,
|
||||
bounds: &GenericBounds,
|
||||
in_trait: bool,
|
||||
fn_kind: Option<FnDeclKind>,
|
||||
itctx: &ImplTraitContext,
|
||||
) -> hir::TyKind<'hir> {
|
||||
// Make sure we know that some funky desugaring has been going on here.
|
||||
@ -1528,10 +1550,22 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
Vec::new()
|
||||
}
|
||||
hir::OpaqueTyOrigin::FnReturn(..) => {
|
||||
// in fn return position, like the `fn test<'a>() -> impl Debug + 'a`
|
||||
// example, we only need to duplicate lifetimes that appear in the
|
||||
// bounds, since those are the only ones that are captured by the opaque.
|
||||
lifetime_collector::lifetimes_in_bounds(&self.resolver, bounds)
|
||||
if let FnDeclKind::Impl | FnDeclKind::Trait =
|
||||
fn_kind.expect("expected RPITs to be lowered with a FnKind")
|
||||
{
|
||||
// return-position impl trait in trait was decided to capture all
|
||||
// in-scope lifetimes, which we collect for all opaques during resolution.
|
||||
self.resolver
|
||||
.take_extra_lifetime_params(opaque_ty_node_id)
|
||||
.into_iter()
|
||||
.map(|(ident, id, _)| Lifetime { id, ident })
|
||||
.collect()
|
||||
} else {
|
||||
// in fn return position, like the `fn test<'a>() -> impl Debug + 'a`
|
||||
// example, we only need to duplicate lifetimes that appear in the
|
||||
// bounds, since those are the only ones that are captured by the opaque.
|
||||
lifetime_collector::lifetimes_in_bounds(&self.resolver, bounds)
|
||||
}
|
||||
}
|
||||
hir::OpaqueTyOrigin::AsyncFn(..) => {
|
||||
unreachable!("should be using `lower_async_fn_ret_ty`")
|
||||
@ -1542,7 +1576,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
self.lower_opaque_inner(
|
||||
opaque_ty_node_id,
|
||||
origin,
|
||||
in_trait,
|
||||
matches!(fn_kind, Some(FnDeclKind::Trait)),
|
||||
captured_lifetimes_to_duplicate,
|
||||
span,
|
||||
opaque_ty_span,
|
||||
@ -1630,7 +1664,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
lifetime.ident,
|
||||
));
|
||||
|
||||
// Now make an arg that we can use for the substs of the opaque tykind.
|
||||
// Now make an arg that we can use for the generic params of the opaque tykind.
|
||||
let id = self.next_node_id();
|
||||
let lifetime_arg = self.new_named_lifetime_with_res(id, lifetime.ident, res);
|
||||
let duplicated_lifetime_def_id = self.local_def_id(duplicated_lifetime_node_id);
|
||||
@ -1790,12 +1824,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
}
|
||||
|
||||
let fn_def_id = self.local_def_id(fn_node_id);
|
||||
self.lower_async_fn_ret_ty(
|
||||
&decl.output,
|
||||
fn_def_id,
|
||||
ret_id,
|
||||
matches!(kind, FnDeclKind::Trait),
|
||||
)
|
||||
self.lower_async_fn_ret_ty(&decl.output, fn_def_id, ret_id, kind)
|
||||
} else {
|
||||
match &decl.output {
|
||||
FnRetTy::Ty(ty) => {
|
||||
@ -1803,7 +1832,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
let fn_def_id = self.local_def_id(fn_node_id);
|
||||
ImplTraitContext::ReturnPositionOpaqueTy {
|
||||
origin: hir::OpaqueTyOrigin::FnReturn(fn_def_id),
|
||||
in_trait: matches!(kind, FnDeclKind::Trait),
|
||||
fn_kind: kind,
|
||||
}
|
||||
} else {
|
||||
let position = match kind {
|
||||
@ -1871,7 +1900,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
output: &FnRetTy,
|
||||
fn_def_id: LocalDefId,
|
||||
opaque_ty_node_id: NodeId,
|
||||
in_trait: bool,
|
||||
fn_kind: FnDeclKind,
|
||||
) -> hir::FnRetTy<'hir> {
|
||||
let span = self.lower_span(output.span());
|
||||
let opaque_ty_span = self.mark_span_with_reason(DesugaringKind::Async, span, None);
|
||||
@ -1886,7 +1915,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
let opaque_ty_ref = self.lower_opaque_inner(
|
||||
opaque_ty_node_id,
|
||||
hir::OpaqueTyOrigin::AsyncFn(fn_def_id),
|
||||
in_trait,
|
||||
matches!(fn_kind, FnDeclKind::Trait),
|
||||
captured_lifetimes,
|
||||
span,
|
||||
opaque_ty_span,
|
||||
@ -1894,7 +1923,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
let future_bound = this.lower_async_fn_output_type_to_future_bound(
|
||||
output,
|
||||
span,
|
||||
if in_trait && !this.tcx.features().return_position_impl_trait_in_trait {
|
||||
if let FnDeclKind::Trait = fn_kind
|
||||
&& !this.tcx.features().return_position_impl_trait_in_trait
|
||||
{
|
||||
ImplTraitContext::FeatureGated(
|
||||
ImplTraitPosition::TraitReturn,
|
||||
sym::return_position_impl_trait_in_trait,
|
||||
@ -1902,7 +1933,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
} else {
|
||||
ImplTraitContext::ReturnPositionOpaqueTy {
|
||||
origin: hir::OpaqueTyOrigin::FnReturn(fn_def_id),
|
||||
in_trait,
|
||||
fn_kind,
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
@ -1,3 +1,7 @@
|
||||
ast_passes_anon_struct_or_union_not_allowed =
|
||||
anonymous {$struct_or_union}s are not allowed outside of unnamed struct or union fields
|
||||
.label = anonymous {$struct_or_union} declared here
|
||||
|
||||
ast_passes_assoc_const_without_body =
|
||||
associated constant in `impl` without body
|
||||
.suggestion = provide a definition for the constant
|
||||
@ -113,16 +117,6 @@ ast_passes_forbidden_default =
|
||||
`default` is only allowed on items in trait impls
|
||||
.label = `default` because of this
|
||||
|
||||
ast_passes_forbidden_let =
|
||||
`let` expressions are not supported here
|
||||
.note = only supported directly in conditions of `if` and `while` expressions
|
||||
.not_supported_or = `||` operators are not supported in let chain expressions
|
||||
.not_supported_parentheses = `let`s wrapped in parentheses are not supported in a context with let chains
|
||||
|
||||
ast_passes_forbidden_let_stable =
|
||||
expected expression, found statement (`let`)
|
||||
.note = variable declaration using `let` is a statement
|
||||
|
||||
ast_passes_forbidden_lifetime_bound =
|
||||
lifetime bounds cannot be used in this context
|
||||
|
||||
@ -162,6 +156,14 @@ ast_passes_inherent_cannot_be = inherent impls cannot be {$annotation}
|
||||
ast_passes_invalid_label =
|
||||
invalid label name `{$name}`
|
||||
|
||||
ast_passes_invalid_unnamed_field =
|
||||
unnamed fields are not allowed outside of structs or unions
|
||||
.label = unnamed field declared here
|
||||
|
||||
ast_passes_invalid_unnamed_field_ty =
|
||||
unnamed fields can only have struct or union types
|
||||
.label = not a struct or union
|
||||
|
||||
ast_passes_item_underscore = `{$kind}` items in this context need a name
|
||||
.label = `_` is not a valid name for this `{$kind}` item
|
||||
|
||||
|
||||
@ -14,14 +14,12 @@ use rustc_ast::{walk_list, StaticItem};
|
||||
use rustc_ast_pretty::pprust::{self, State};
|
||||
use rustc_data_structures::fx::FxIndexMap;
|
||||
use rustc_feature::Features;
|
||||
use rustc_macros::Subdiagnostic;
|
||||
use rustc_parse::validate_attr;
|
||||
use rustc_session::lint::builtin::{
|
||||
DEPRECATED_WHERE_CLAUSE_LOCATION, MISSING_ABI, PATTERNS_IN_FNS_WITHOUT_BODY,
|
||||
};
|
||||
use rustc_session::lint::{BuiltinLintDiagnostics, LintBuffer};
|
||||
use rustc_session::Session;
|
||||
use rustc_span::source_map::Spanned;
|
||||
use rustc_span::symbol::{kw, sym, Ident};
|
||||
use rustc_span::Span;
|
||||
use rustc_target::spec::abi;
|
||||
@ -69,9 +67,6 @@ struct AstValidator<'a> {
|
||||
/// or `Foo::Bar<impl Trait>`
|
||||
is_impl_trait_banned: bool,
|
||||
|
||||
/// See [ForbiddenLetReason]
|
||||
forbidden_let_reason: Option<ForbiddenLetReason>,
|
||||
|
||||
lint_buffer: &'a mut LintBuffer,
|
||||
}
|
||||
|
||||
@ -118,26 +113,6 @@ impl<'a> AstValidator<'a> {
|
||||
self.with_tilde_const(Some(ctx), f)
|
||||
}
|
||||
|
||||
fn with_let_management(
|
||||
&mut self,
|
||||
forbidden_let_reason: Option<ForbiddenLetReason>,
|
||||
f: impl FnOnce(&mut Self, Option<ForbiddenLetReason>),
|
||||
) {
|
||||
let old = mem::replace(&mut self.forbidden_let_reason, forbidden_let_reason);
|
||||
f(self, old);
|
||||
self.forbidden_let_reason = old;
|
||||
}
|
||||
|
||||
/// Emits an error banning the `let` expression provided in the given location.
|
||||
fn ban_let_expr(&self, expr: &'a Expr, forbidden_let_reason: ForbiddenLetReason) {
|
||||
let sess = &self.session;
|
||||
if sess.opts.unstable_features.is_nightly_build() {
|
||||
sess.emit_err(errors::ForbiddenLet { span: expr.span, reason: forbidden_let_reason });
|
||||
} else {
|
||||
sess.emit_err(errors::ForbiddenLetStable { span: expr.span });
|
||||
}
|
||||
}
|
||||
|
||||
fn check_type_alias_where_clause_location(
|
||||
&mut self,
|
||||
ty_alias: &TyAlias,
|
||||
@ -223,10 +198,27 @@ impl<'a> AstValidator<'a> {
|
||||
}
|
||||
}
|
||||
}
|
||||
TyKind::AnonStruct(ref fields, ..) | TyKind::AnonUnion(ref fields, ..) => {
|
||||
walk_list!(self, visit_field_def, fields)
|
||||
}
|
||||
_ => visit::walk_ty(self, t),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_struct_field_def(&mut self, field: &'a FieldDef) {
|
||||
if let Some(ident) = field.ident &&
|
||||
ident.name == kw::Underscore {
|
||||
self.check_unnamed_field_ty(&field.ty, ident.span);
|
||||
self.visit_vis(&field.vis);
|
||||
self.visit_ident(ident);
|
||||
self.visit_ty_common(&field.ty);
|
||||
self.walk_ty(&field.ty);
|
||||
walk_list!(self, visit_attribute, &field.attrs);
|
||||
} else {
|
||||
self.visit_field_def(field);
|
||||
}
|
||||
}
|
||||
|
||||
fn err_handler(&self) -> &rustc_errors::Handler {
|
||||
&self.session.diagnostic()
|
||||
}
|
||||
@ -264,6 +256,42 @@ impl<'a> AstValidator<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn check_unnamed_field_ty(&self, ty: &Ty, span: Span) {
|
||||
if matches!(
|
||||
&ty.kind,
|
||||
// We already checked for `kw::Underscore` before calling this function,
|
||||
// so skip the check
|
||||
TyKind::AnonStruct(..) | TyKind::AnonUnion(..)
|
||||
// If the anonymous field contains a Path as type, we can't determine
|
||||
// if the path is a valid struct or union, so skip the check
|
||||
| TyKind::Path(..)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
self.err_handler().emit_err(errors::InvalidUnnamedFieldTy { span, ty_span: ty.span });
|
||||
}
|
||||
|
||||
fn deny_anon_struct_or_union(&self, ty: &Ty) {
|
||||
let struct_or_union = match &ty.kind {
|
||||
TyKind::AnonStruct(..) => "struct",
|
||||
TyKind::AnonUnion(..) => "union",
|
||||
_ => return,
|
||||
};
|
||||
self.err_handler()
|
||||
.emit_err(errors::AnonStructOrUnionNotAllowed { struct_or_union, span: ty.span });
|
||||
}
|
||||
|
||||
fn deny_unnamed_field(&self, field: &FieldDef) {
|
||||
if let Some(ident) = field.ident &&
|
||||
ident.name == kw::Underscore {
|
||||
self.err_handler()
|
||||
.emit_err(errors::InvalidUnnamedField {
|
||||
span: field.span,
|
||||
ident_span: ident.span
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn check_trait_fn_not_const(&self, constness: Const) {
|
||||
if let Const::Yes(span) = constness {
|
||||
self.session.emit_err(errors::TraitFnConst { span });
|
||||
@ -726,69 +754,9 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
||||
validate_attr::check_attr(&self.session.parse_sess, attr);
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
self.with_let_management(Some(ForbiddenLetReason::GenericForbidden), |this, forbidden_let_reason| {
|
||||
match &expr.kind {
|
||||
ExprKind::Binary(Spanned { node: BinOpKind::Or, span }, lhs, rhs) => {
|
||||
let local_reason = Some(ForbiddenLetReason::NotSupportedOr(*span));
|
||||
this.with_let_management(local_reason, |this, _| this.visit_expr(lhs));
|
||||
this.with_let_management(local_reason, |this, _| this.visit_expr(rhs));
|
||||
}
|
||||
ExprKind::If(cond, then, opt_else) => {
|
||||
this.visit_block(then);
|
||||
walk_list!(this, visit_expr, opt_else);
|
||||
this.with_let_management(None, |this, _| this.visit_expr(cond));
|
||||
return;
|
||||
}
|
||||
ExprKind::Let(..) if let Some(elem) = forbidden_let_reason => {
|
||||
this.ban_let_expr(expr, elem);
|
||||
},
|
||||
ExprKind::Match(scrutinee, arms) => {
|
||||
this.visit_expr(scrutinee);
|
||||
for arm in arms {
|
||||
this.visit_expr(&arm.body);
|
||||
this.visit_pat(&arm.pat);
|
||||
walk_list!(this, visit_attribute, &arm.attrs);
|
||||
if let Some(guard) = &arm.guard {
|
||||
this.with_let_management(None, |this, _| {
|
||||
this.visit_expr(guard)
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
ExprKind::Paren(local_expr) => {
|
||||
fn has_let_expr(expr: &Expr) -> bool {
|
||||
match &expr.kind {
|
||||
ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
|
||||
ExprKind::Let(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
let local_reason = if has_let_expr(local_expr) {
|
||||
Some(ForbiddenLetReason::NotSupportedParentheses(local_expr.span))
|
||||
}
|
||||
else {
|
||||
forbidden_let_reason
|
||||
};
|
||||
this.with_let_management(local_reason, |this, _| this.visit_expr(local_expr));
|
||||
}
|
||||
ExprKind::Binary(Spanned { node: BinOpKind::And, .. }, ..) => {
|
||||
this.with_let_management(forbidden_let_reason, |this, _| visit::walk_expr(this, expr));
|
||||
return;
|
||||
}
|
||||
ExprKind::While(cond, then, opt_label) => {
|
||||
walk_list!(this, visit_label, opt_label);
|
||||
this.visit_block(then);
|
||||
this.with_let_management(None, |this, _| this.visit_expr(cond));
|
||||
return;
|
||||
}
|
||||
_ => visit::walk_expr(this, expr),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn visit_ty(&mut self, ty: &'a Ty) {
|
||||
self.visit_ty_common(ty);
|
||||
self.deny_anon_struct_or_union(ty);
|
||||
self.walk_ty(ty)
|
||||
}
|
||||
|
||||
@ -803,6 +771,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
||||
}
|
||||
|
||||
fn visit_field_def(&mut self, field: &'a FieldDef) {
|
||||
self.deny_unnamed_field(field);
|
||||
visit::walk_field_def(self, field)
|
||||
}
|
||||
|
||||
@ -995,10 +964,38 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
||||
self.check_mod_file_item_asciionly(item.ident);
|
||||
}
|
||||
}
|
||||
ItemKind::Union(vdata, ..) => {
|
||||
ItemKind::Struct(vdata, generics) => match vdata {
|
||||
// Duplicating the `Visitor` logic allows catching all cases
|
||||
// of `Anonymous(Struct, Union)` outside of a field struct or union.
|
||||
//
|
||||
// Inside `visit_ty` the validator catches every `Anonymous(Struct, Union)` it
|
||||
// encounters, and only on `ItemKind::Struct` and `ItemKind::Union`
|
||||
// it uses `visit_ty_common`, which doesn't contain that specific check.
|
||||
VariantData::Struct(fields, ..) => {
|
||||
self.visit_vis(&item.vis);
|
||||
self.visit_ident(item.ident);
|
||||
self.visit_generics(generics);
|
||||
walk_list!(self, visit_struct_field_def, fields);
|
||||
walk_list!(self, visit_attribute, &item.attrs);
|
||||
return;
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
ItemKind::Union(vdata, generics) => {
|
||||
if vdata.fields().is_empty() {
|
||||
self.err_handler().emit_err(errors::FieldlessUnion { span: item.span });
|
||||
}
|
||||
match vdata {
|
||||
VariantData::Struct(fields, ..) => {
|
||||
self.visit_vis(&item.vis);
|
||||
self.visit_ident(item.ident);
|
||||
self.visit_generics(generics);
|
||||
walk_list!(self, visit_struct_field_def, fields);
|
||||
walk_list!(self, visit_attribute, &item.attrs);
|
||||
return;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
ItemKind::Const(box ConstItem { defaultness, expr: None, .. }) => {
|
||||
self.check_defaultness(item.span, *defaultness);
|
||||
@ -1518,26 +1515,9 @@ pub fn check_crate(
|
||||
outer_impl_trait: None,
|
||||
disallow_tilde_const: None,
|
||||
is_impl_trait_banned: false,
|
||||
forbidden_let_reason: Some(ForbiddenLetReason::GenericForbidden),
|
||||
lint_buffer: lints,
|
||||
};
|
||||
visit::walk_crate(&mut validator, krate);
|
||||
|
||||
validator.has_proc_macro_decls
|
||||
}
|
||||
|
||||
/// Used to forbid `let` expressions in certain syntactic locations.
|
||||
#[derive(Clone, Copy, Subdiagnostic)]
|
||||
pub(crate) enum ForbiddenLetReason {
|
||||
/// `let` is not valid and the source environment is not important
|
||||
GenericForbidden,
|
||||
/// A let chain with the `||` operator
|
||||
#[note(ast_passes_not_supported_or)]
|
||||
NotSupportedOr(#[primary_span] Span),
|
||||
/// A let chain with invalid parentheses
|
||||
///
|
||||
/// For example, `let 1 = 1 && (expr && expr)` is allowed
|
||||
/// but `(let 1 = 1 && (let 1 = 1 && (let 1 = 1))) && let a = 1` is not
|
||||
#[note(ast_passes_not_supported_parentheses)]
|
||||
NotSupportedParentheses(#[primary_span] Span),
|
||||
}
|
||||
|
||||
@ -5,27 +5,8 @@ use rustc_errors::AddToDiagnostic;
|
||||
use rustc_macros::{Diagnostic, Subdiagnostic};
|
||||
use rustc_span::{symbol::Ident, Span, Symbol};
|
||||
|
||||
use crate::ast_validation::ForbiddenLetReason;
|
||||
use crate::fluent_generated as fluent;
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(ast_passes_forbidden_let)]
|
||||
#[note]
|
||||
pub struct ForbiddenLet {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
#[subdiagnostic]
|
||||
pub(crate) reason: ForbiddenLetReason,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(ast_passes_forbidden_let_stable)]
|
||||
#[note]
|
||||
pub struct ForbiddenLetStable {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(ast_passes_keyword_lifetime)]
|
||||
pub struct KeywordLifetime {
|
||||
@ -727,3 +708,30 @@ pub struct ConstraintOnNegativeBound {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(ast_passes_invalid_unnamed_field_ty)]
|
||||
pub struct InvalidUnnamedFieldTy {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
#[label]
|
||||
pub ty_span: Span,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(ast_passes_invalid_unnamed_field)]
|
||||
pub struct InvalidUnnamedField {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
#[label]
|
||||
pub ident_span: Span,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(ast_passes_anon_struct_or_union_not_allowed)]
|
||||
pub struct AnonStructOrUnionNotAllowed {
|
||||
#[primary_span]
|
||||
#[label]
|
||||
pub span: Span,
|
||||
pub struct_or_union: &'static str,
|
||||
}
|
||||
|
||||
@ -570,6 +570,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
gate_all!(builtin_syntax, "`builtin #` syntax is unstable");
gate_all!(explicit_tail_calls, "`become` expression is experimental");
gate_all!(generic_const_items, "generic const items are experimental");
gate_all!(unnamed_fields, "unnamed fields are not yet fully implemented");

if !visitor.features.negative_bounds {
for &span in spans.get(&sym::negative_bounds).iter().copied().flatten() {
@ -577,11 +578,11 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
}
}

// All uses of `gate_all!` below this point were added in #65742,
// All uses of `gate_all_legacy_dont_use!` below this point were added in #65742,
// and subsequently disabled (with the non-early gating readded).
// We emit an early future-incompatible warning for these.
// New syntax gates should go above here to get a hard error gate.
macro_rules! gate_all {
macro_rules! gate_all_legacy_dont_use {
($gate:ident, $msg:literal) => {
for span in spans.get(&sym::$gate).unwrap_or(&vec![]) {
gate_feature_post!(future_incompatible; &visitor, $gate, *span, $msg);
@ -589,13 +590,19 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
};
}

gate_all!(trait_alias, "trait aliases are experimental");
gate_all!(associated_type_bounds, "associated type bounds are unstable");
gate_all!(return_type_notation, "return type notation is experimental");
gate_all!(decl_macro, "`macro` is experimental");
gate_all!(box_patterns, "box pattern syntax is experimental");
gate_all!(exclusive_range_pattern, "exclusive range pattern syntax is experimental");
gate_all!(try_blocks, "`try` blocks are unstable");
gate_all_legacy_dont_use!(trait_alias, "trait aliases are experimental");
gate_all_legacy_dont_use!(associated_type_bounds, "associated type bounds are unstable");
// Despite being a new feature, `where T: Trait<Assoc(): Sized>`, which is RTN syntax now,
// used to be gated under associated_type_bounds, which are right above, so RTN needs to
// be too.
gate_all_legacy_dont_use!(return_type_notation, "return type notation is experimental");
gate_all_legacy_dont_use!(decl_macro, "`macro` is experimental");
gate_all_legacy_dont_use!(box_patterns, "box pattern syntax is experimental");
gate_all_legacy_dont_use!(
exclusive_range_pattern,
"exclusive range pattern syntax is experimental"
);
gate_all_legacy_dont_use!(try_blocks, "`try` blocks are unstable");

visit::walk_crate(&mut visitor, krate);
}

@ -1064,6 +1064,14 @@ impl<'a> State<'a> {
}
self.pclose();
}
ast::TyKind::AnonStruct(fields) => {
self.head("struct");
self.print_record_struct_body(&fields, ty.span);
}
ast::TyKind::AnonUnion(fields) => {
self.head("union");
self.print_record_struct_body(&fields, ty.span);
}
ast::TyKind::Paren(typ) => {
self.popen();
self.print_type(typ);

@ -352,7 +352,7 @@ impl<'a> State<'a> {
self.end();
self.word(")");
}
ast::ExprKind::Let(pat, scrutinee, _) => {
ast::ExprKind::Let(pat, scrutinee, _, _) => {
self.print_let(pat, scrutinee);
}
ast::ExprKind::If(test, blk, elseopt) => self.print_if(test, blk, elseopt.as_deref()),

@ -443,7 +443,11 @@ impl<'a> State<'a> {
}
}

fn print_record_struct_body(&mut self, fields: &[ast::FieldDef], span: rustc_span::Span) {
pub(crate) fn print_record_struct_body(
&mut self,
fields: &[ast::FieldDef],
span: rustc_span::Span,
) {
self.nbsp();
self.bopen();

@ -937,6 +937,7 @@ pub fn find_deprecation(
#[derive(PartialEq, Debug, Encodable, Decodable, Copy, Clone)]
pub enum ReprAttr {
ReprInt(IntType),
ReprRust,
ReprC,
ReprPacked(u32),
ReprSimd,
@ -985,6 +986,7 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
let mut recognised = false;
if item.is_word() {
let hint = match item.name_or_empty() {
sym::Rust => Some(ReprRust),
sym::C => Some(ReprC),
sym::packed => Some(ReprPacked(1)),
sym::simd => Some(ReprSimd),

@ -74,9 +74,6 @@ borrowck_higher_ranked_subtype_error =
|
||||
borrowck_lifetime_constraints_error =
|
||||
lifetime may not live long enough
|
||||
|
||||
borrowck_move_borrowed =
|
||||
cannot move out of `{$desc}` because it is borrowed
|
||||
|
||||
borrowck_move_out_place_here =
|
||||
{$place} is moved here
|
||||
|
||||
@ -166,6 +163,8 @@ borrowck_returned_lifetime_wrong =
|
||||
borrowck_returned_ref_escaped =
|
||||
returns a reference to a captured variable which escapes the closure body
|
||||
|
||||
borrowck_simd_shuffle_last_const = last argument of `simd_shuffle` is required to be a `const` item
|
||||
|
||||
borrowck_suggest_create_freash_reborrow =
|
||||
consider reborrowing the `Pin` instead of moving it
|
||||
|
||||
@ -248,12 +247,6 @@ borrowck_var_move_by_use_in_closure =
|
||||
borrowck_var_move_by_use_in_generator =
|
||||
move occurs due to use in generator
|
||||
|
||||
borrowck_var_move_by_use_place_in_closure =
|
||||
move occurs due to use of {$place} in closure
|
||||
|
||||
borrowck_var_move_by_use_place_in_generator =
|
||||
move occurs due to use of {$place} in generator
|
||||
|
||||
borrowck_var_mutable_borrow_by_use_place_in_closure =
|
||||
mutable borrow occurs due to use of {$place} in closure
|
||||
|
||||
|
||||
@ -71,7 +71,7 @@ impl<'tcx> fmt::Display for BorrowData<'tcx> {
|
||||
fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let kind = match self.kind {
|
||||
mir::BorrowKind::Shared => "",
|
||||
mir::BorrowKind::Shallow => "shallow ",
|
||||
mir::BorrowKind::Fake => "fake ",
|
||||
mir::BorrowKind::Mut { kind: mir::MutBorrowKind::ClosureCapture } => "uniq ",
|
||||
// FIXME: differentiate `TwoPhaseBorrow`
|
||||
mir::BorrowKind::Mut {
|
||||
|
||||
@ -49,7 +49,7 @@ pub fn categorize(context: PlaceContext) -> Option<DefUse> {
|
||||
// cross suspension points so this behavior is unproblematic.
|
||||
PlaceContext::MutatingUse(MutatingUseContext::Borrow) |
|
||||
PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow) |
|
||||
PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow) |
|
||||
PlaceContext::NonMutatingUse(NonMutatingUseContext::FakeBorrow) |
|
||||
|
||||
// `PlaceMention` and `AscribeUserType` both evaluate the place, which must not
|
||||
// contain dangling references.
|
||||
|
||||
@ -1025,7 +1025,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
self.cannot_uniquely_borrow_by_two_closures(span, &desc_place, issued_span, None)
|
||||
}
|
||||
|
||||
(BorrowKind::Mut { .. }, BorrowKind::Shallow) => {
|
||||
(BorrowKind::Mut { .. }, BorrowKind::Fake) => {
|
||||
if let Some(immutable_section_description) =
|
||||
self.classify_immutable_section(issued_borrow.assigned_place)
|
||||
{
|
||||
@ -1117,11 +1117,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
)
|
||||
}
|
||||
|
||||
(BorrowKind::Shared, BorrowKind::Shared | BorrowKind::Shallow)
|
||||
| (
|
||||
BorrowKind::Shallow,
|
||||
BorrowKind::Mut { .. } | BorrowKind::Shared | BorrowKind::Shallow,
|
||||
) => unreachable!(),
|
||||
(BorrowKind::Shared, BorrowKind::Shared | BorrowKind::Fake)
|
||||
| (BorrowKind::Fake, BorrowKind::Mut { .. } | BorrowKind::Shared | BorrowKind::Fake) => {
|
||||
unreachable!()
|
||||
}
|
||||
};
|
||||
|
||||
if issued_spans == borrow_spans {
|
||||
@ -2130,21 +2129,27 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
/// misleading users in cases like `tests/ui/nll/borrowed-temporary-error.rs`.
|
||||
/// We could expand the analysis to suggest hoisting all of the relevant parts of
|
||||
/// the users' code to make the code compile, but that could be too much.
|
||||
struct NestedStatementVisitor {
|
||||
/// We record `prop_expr` along the way in order to check whether the expression is a `FormatArguments`,
|
||||
/// which is a special case since it's generated by the compiler.
|
||||
struct NestedStatementVisitor<'tcx> {
|
||||
span: Span,
|
||||
current: usize,
|
||||
found: usize,
|
||||
prop_expr: Option<&'tcx hir::Expr<'tcx>>,
|
||||
}
|
||||
|
||||
impl<'tcx> Visitor<'tcx> for NestedStatementVisitor {
|
||||
fn visit_block(&mut self, block: &hir::Block<'tcx>) {
|
||||
impl<'tcx> Visitor<'tcx> for NestedStatementVisitor<'tcx> {
|
||||
fn visit_block(&mut self, block: &'tcx hir::Block<'tcx>) {
|
||||
self.current += 1;
|
||||
walk_block(self, block);
|
||||
self.current -= 1;
|
||||
}
|
||||
fn visit_expr(&mut self, expr: &hir::Expr<'tcx>) {
|
||||
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
|
||||
if self.span == expr.span.source_callsite() {
|
||||
self.found = self.current;
|
||||
if self.prop_expr.is_none() {
|
||||
self.prop_expr = Some(expr);
|
||||
}
|
||||
}
|
||||
walk_expr(self, expr);
|
||||
}
|
||||
@ -2162,22 +2167,40 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
span: proper_span,
|
||||
current: 0,
|
||||
found: 0,
|
||||
prop_expr: None,
|
||||
};
|
||||
visitor.visit_stmt(stmt);
|
||||
|
||||
let typeck_results = self.infcx.tcx.typeck(self.mir_def_id());
|
||||
let expr_ty: Option<Ty<'_>> = visitor.prop_expr.map(|expr| typeck_results.expr_ty(expr).peel_refs());
|
||||
|
||||
let is_format_arguments_item =
|
||||
if let Some(expr_ty) = expr_ty
|
||||
&& let ty::Adt(adt, _) = expr_ty.kind() {
|
||||
self.infcx.tcx.lang_items().get(LangItem::FormatArguments) == Some(adt.did())
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
if visitor.found == 0
|
||||
&& stmt.span.contains(proper_span)
|
||||
&& let Some(p) = sm.span_to_margin(stmt.span)
|
||||
&& let Ok(s) = sm.span_to_snippet(proper_span)
|
||||
{
|
||||
let addition = format!("let binding = {};\n{}", s, " ".repeat(p));
|
||||
err.multipart_suggestion_verbose(
|
||||
msg,
|
||||
vec![
|
||||
(stmt.span.shrink_to_lo(), addition),
|
||||
(proper_span, "binding".to_string()),
|
||||
],
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
if !is_format_arguments_item {
|
||||
let addition = format!("let binding = {};\n{}", s, " ".repeat(p));
|
||||
err.multipart_suggestion_verbose(
|
||||
msg,
|
||||
vec![
|
||||
(stmt.span.shrink_to_lo(), addition),
|
||||
(proper_span, "binding".to_string()),
|
||||
],
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
} else {
|
||||
err.note("the result of `format_args!` can only be assigned directly if no placeholders in it's arguments are used");
|
||||
err.note("to learn more, visit <https://doc.rust-lang.org/std/macro.format_args.html>");
|
||||
}
|
||||
suggested = true;
|
||||
break;
|
||||
}
|
||||
@ -2620,7 +2643,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
let loan_span = loan_spans.args_or_use();
|
||||
|
||||
let descr_place = self.describe_any_place(place.as_ref());
|
||||
if loan.kind == BorrowKind::Shallow {
|
||||
if loan.kind == BorrowKind::Fake {
|
||||
if let Some(section) = self.classify_immutable_section(loan.assigned_place) {
|
||||
let mut err = self.cannot_mutate_in_immutable_section(
|
||||
span,
|
||||
@ -2804,6 +2827,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
}
|
||||
ProjectionElem::ConstantIndex { .. }
|
||||
| ProjectionElem::Subslice { .. }
|
||||
| ProjectionElem::Subtype(_)
|
||||
| ProjectionElem::Index(_) => kind,
|
||||
},
|
||||
place_ty.projection_ty(tcx, elem),
|
||||
|
||||
@ -13,7 +13,7 @@ use rustc_index::IndexSlice;
|
||||
use rustc_infer::infer::LateBoundRegionConversionTime;
|
||||
use rustc_middle::mir::tcx::PlaceTy;
|
||||
use rustc_middle::mir::{
|
||||
AggregateKind, CallSource, Constant, FakeReadCause, Local, LocalInfo, LocalKind, Location,
|
||||
AggregateKind, CallSource, ConstOperand, FakeReadCause, Local, LocalInfo, LocalKind, Location,
|
||||
Operand, Place, PlaceRef, ProjectionElem, Rvalue, Statement, StatementKind, Terminator,
|
||||
TerminatorKind,
|
||||
};
|
||||
@ -101,12 +101,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
let terminator = self.body[location.block].terminator();
|
||||
debug!("add_moved_or_invoked_closure_note: terminator={:?}", terminator);
|
||||
if let TerminatorKind::Call {
|
||||
func: Operand::Constant(box Constant { literal, .. }),
|
||||
func: Operand::Constant(box ConstOperand { const_, .. }),
|
||||
args,
|
||||
..
|
||||
} = &terminator.kind
|
||||
{
|
||||
if let ty::FnDef(id, _) = *literal.ty().kind() {
|
||||
if let ty::FnDef(id, _) = *const_.ty().kind() {
|
||||
debug!("add_moved_or_invoked_closure_note: id={:?}", id);
|
||||
if Some(self.infcx.tcx.parent(id)) == self.infcx.tcx.lang_items().fn_once_trait() {
|
||||
let closure = match args.first() {
|
||||
@ -242,6 +242,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
ProjectionElem::Downcast(..) if opt.including_downcast => return None,
|
||||
ProjectionElem::Downcast(..) => (),
|
||||
ProjectionElem::OpaqueCast(..) => (),
|
||||
ProjectionElem::Subtype(..) => (),
|
||||
ProjectionElem::Field(field, _ty) => {
|
||||
// FIXME(project-rfc_2229#36): print capture precisely here.
|
||||
if let Some(field) = self.is_upvar_field_projection(PlaceRef {
|
||||
@ -322,7 +323,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
PlaceRef { local, projection: proj_base }.ty(self.body, self.infcx.tcx)
|
||||
}
|
||||
ProjectionElem::Downcast(..) => place.ty(self.body, self.infcx.tcx),
|
||||
ProjectionElem::OpaqueCast(ty) => PlaceTy::from_ty(*ty),
|
||||
ProjectionElem::Subtype(ty) | ProjectionElem::OpaqueCast(ty) => {
|
||||
PlaceTy::from_ty(*ty)
|
||||
}
|
||||
ProjectionElem::Field(_, field_type) => PlaceTy::from_ty(*field_type),
|
||||
},
|
||||
};
|
||||
@ -628,7 +631,7 @@ impl UseSpans<'_> {
|
||||
err.subdiagnostic(match kind {
|
||||
Some(kd) => match kd {
|
||||
rustc_middle::mir::BorrowKind::Shared
|
||||
| rustc_middle::mir::BorrowKind::Shallow => {
|
||||
| rustc_middle::mir::BorrowKind::Fake => {
|
||||
CaptureVarKind::Immut { kind_span: capture_kind_span }
|
||||
}
|
||||
|
||||
|
||||
@ -1,9 +1,10 @@
|
||||
use hir::ExprKind;
|
||||
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::intravisit::Visitor;
|
||||
use rustc_hir::Node;
|
||||
use rustc_middle::mir::{Mutability, Place, PlaceRef, ProjectionElem};
|
||||
use rustc_middle::ty::{self, Ty, TyCtxt};
|
||||
use rustc_middle::ty::{self, InstanceDef, Ty, TyCtxt};
|
||||
use rustc_middle::{
|
||||
hir::place::PlaceBase,
|
||||
mir::{self, BindingForm, Local, LocalDecl, LocalInfo, LocalKind, Location},
|
||||
@ -158,6 +159,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
[
|
||||
..,
|
||||
ProjectionElem::Index(_)
|
||||
| ProjectionElem::Subtype(_)
|
||||
| ProjectionElem::ConstantIndex { .. }
|
||||
| ProjectionElem::OpaqueCast { .. }
|
||||
| ProjectionElem::Subslice { .. }
|
||||
@ -225,17 +227,17 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
}
|
||||
if suggest {
|
||||
borrow_spans.var_subdiag(
|
||||
None,
|
||||
&mut err,
|
||||
Some(mir::BorrowKind::Mut { kind: mir::MutBorrowKind::Default }),
|
||||
|_kind, var_span| {
|
||||
let place = self.describe_any_place(access_place.as_ref());
|
||||
crate::session_diagnostics::CaptureVarCause::MutableBorrowUsePlaceClosure {
|
||||
place,
|
||||
var_span,
|
||||
}
|
||||
},
|
||||
);
|
||||
None,
|
||||
&mut err,
|
||||
Some(mir::BorrowKind::Mut { kind: mir::MutBorrowKind::Default }),
|
||||
|_kind, var_span| {
|
||||
let place = self.describe_any_place(access_place.as_ref());
|
||||
crate::session_diagnostics::CaptureVarCause::MutableBorrowUsePlaceClosure {
|
||||
place,
|
||||
var_span,
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
borrow_span
|
||||
}
|
||||
@ -262,11 +264,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
} => {
|
||||
err.span_label(span, format!("cannot {act}"));
|
||||
|
||||
if let Some(span) = get_mut_span_in_struct_field(
|
||||
self.infcx.tcx,
|
||||
Place::ty_from(local, proj_base, self.body, self.infcx.tcx).ty,
|
||||
*field,
|
||||
) {
|
||||
let place = Place::ty_from(local, proj_base, self.body, self.infcx.tcx);
|
||||
if let Some(span) = get_mut_span_in_struct_field(self.infcx.tcx, place.ty, *field) {
|
||||
err.span_suggestion_verbose(
|
||||
span,
|
||||
"consider changing this to be mutable",
|
||||
@ -373,12 +372,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
err.span_label(span, format!("cannot {act}"));
|
||||
}
|
||||
if suggest {
|
||||
err.span_suggestion_verbose(
|
||||
local_decl.source_info.span.shrink_to_lo(),
|
||||
"consider changing this to be mutable",
|
||||
"mut ",
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
self.construct_mut_suggestion_for_local_binding_patterns(&mut err, local);
|
||||
let tcx = self.infcx.tcx;
|
||||
if let ty::Closure(id, _) = *the_place_err.ty(self.body, tcx).ty.kind() {
|
||||
self.show_mutating_upvar(tcx, id.expect_local(), the_place_err, &mut err);
|
||||
@ -494,6 +488,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
),
|
||||
);
|
||||
|
||||
self.suggest_using_iter_mut(&mut err);
|
||||
self.suggest_make_local_mut(&mut err, local, name);
|
||||
}
|
||||
_ => {
|
||||
@ -713,6 +708,83 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
)
|
||||
}
|
||||
|
||||
fn construct_mut_suggestion_for_local_binding_patterns(
|
||||
&self,
|
||||
err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>,
|
||||
local: Local,
|
||||
) {
|
||||
let local_decl = &self.body.local_decls[local];
|
||||
debug!("local_decl: {:?}", local_decl);
|
||||
let pat_span = match *local_decl.local_info() {
|
||||
LocalInfo::User(BindingForm::Var(mir::VarBindingForm {
|
||||
binding_mode: ty::BindingMode::BindByValue(Mutability::Not),
|
||||
opt_ty_info: _,
|
||||
opt_match_place: _,
|
||||
pat_span,
|
||||
})) => pat_span,
|
||||
_ => local_decl.source_info.span,
|
||||
};
|
||||
|
||||
struct BindingFinder {
|
||||
span: Span,
|
||||
hir_id: Option<hir::HirId>,
|
||||
}
|
||||
|
||||
impl<'tcx> Visitor<'tcx> for BindingFinder {
|
||||
fn visit_stmt(&mut self, s: &'tcx hir::Stmt<'tcx>) {
|
||||
if let hir::StmtKind::Local(local) = s.kind {
|
||||
if local.pat.span == self.span {
|
||||
self.hir_id = Some(local.hir_id);
|
||||
}
|
||||
}
|
||||
hir::intravisit::walk_stmt(self, s);
|
||||
}
|
||||
}
|
||||
|
||||
let hir_map = self.infcx.tcx.hir();
|
||||
let def_id = self.body.source.def_id();
|
||||
let hir_id = if let Some(local_def_id) = def_id.as_local()
|
||||
&& let Some(body_id) = hir_map.maybe_body_owned_by(local_def_id)
|
||||
{
|
||||
let body = hir_map.body(body_id);
|
||||
let mut v = BindingFinder {
|
||||
span: pat_span,
|
||||
hir_id: None,
|
||||
};
|
||||
v.visit_body(body);
|
||||
v.hir_id
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// With ref-binding patterns, the mutability suggestion has to apply to
|
||||
// the binding, not the reference (which would be a type error):
|
||||
//
|
||||
// `let &b = a;` -> `let &(mut b) = a;`
|
||||
if let Some(hir_id) = hir_id
|
||||
&& let Some(hir::Node::Local(hir::Local {
|
||||
pat: hir::Pat { kind: hir::PatKind::Ref(_, _), .. },
|
||||
..
|
||||
})) = hir_map.find(hir_id)
|
||||
&& let Ok(name) = self.infcx.tcx.sess.source_map().span_to_snippet(local_decl.source_info.span)
|
||||
{
|
||||
err.span_suggestion(
|
||||
pat_span,
|
||||
"consider changing this to be mutable",
|
||||
format!("&(mut {name})"),
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
err.span_suggestion_verbose(
|
||||
local_decl.source_info.span.shrink_to_lo(),
|
||||
"consider changing this to be mutable",
|
||||
"mut ",
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
|
||||
// point to span of upvar making closure call require mutable borrow
|
||||
fn show_mutating_upvar(
|
||||
&self,
|
||||
@ -781,83 +853,88 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
|
||||
// Attempt to search similar mutable associated items for suggestion.
|
||||
// In the future, attempt in all path but initially for RHS of for_loop
|
||||
fn suggest_similar_mut_method_for_for_loop(&self, err: &mut Diagnostic) {
|
||||
fn suggest_similar_mut_method_for_for_loop(&self, err: &mut Diagnostic, span: Span) {
|
||||
use hir::{
|
||||
Expr,
|
||||
ExprKind::{Block, Call, DropTemps, Match, MethodCall},
|
||||
BorrowKind, Expr,
|
||||
ExprKind::{AddrOf, Block, Call, MethodCall},
|
||||
};
|
||||
|
||||
let hir_map = self.infcx.tcx.hir();
|
||||
if let Some(body_id) = hir_map.maybe_body_owned_by(self.mir_def_id()) {
|
||||
if let Block(
|
||||
hir::Block {
|
||||
expr:
|
||||
Some(Expr {
|
||||
kind:
|
||||
DropTemps(Expr {
|
||||
kind:
|
||||
Match(
|
||||
Expr {
|
||||
kind:
|
||||
Call(
|
||||
_,
|
||||
[
|
||||
Expr {
|
||||
kind:
|
||||
MethodCall(path_segment, _, _, span),
|
||||
hir_id,
|
||||
..
|
||||
},
|
||||
..,
|
||||
],
|
||||
),
|
||||
..
|
||||
},
|
||||
..,
|
||||
),
|
||||
..
|
||||
}),
|
||||
..
|
||||
}),
|
||||
..
|
||||
},
|
||||
_,
|
||||
) = hir_map.body(body_id).value.kind
|
||||
{
|
||||
let opt_suggestions = self
|
||||
.infcx
|
||||
.tcx
|
||||
.typeck(path_segment.hir_id.owner.def_id)
|
||||
.type_dependent_def_id(*hir_id)
|
||||
.and_then(|def_id| self.infcx.tcx.impl_of_method(def_id))
|
||||
.map(|def_id| self.infcx.tcx.associated_items(def_id))
|
||||
.map(|assoc_items| {
|
||||
assoc_items
|
||||
.in_definition_order()
|
||||
.map(|assoc_item_def| assoc_item_def.ident(self.infcx.tcx))
|
||||
.filter(|&ident| {
|
||||
let original_method_ident = path_segment.ident;
|
||||
original_method_ident != ident
|
||||
&& ident
|
||||
.as_str()
|
||||
.starts_with(&original_method_ident.name.to_string())
|
||||
})
|
||||
.map(|ident| format!("{ident}()"))
|
||||
.peekable()
|
||||
});
|
||||
struct Finder<'tcx> {
|
||||
span: Span,
|
||||
expr: Option<&'tcx Expr<'tcx>>,
|
||||
}
|
||||
|
||||
if let Some(mut suggestions) = opt_suggestions
|
||||
&& suggestions.peek().is_some()
|
||||
{
|
||||
err.span_suggestions(
|
||||
*span,
|
||||
"use mutable method",
|
||||
suggestions,
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
impl<'tcx> Visitor<'tcx> for Finder<'tcx> {
|
||||
fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
|
||||
if e.span == self.span && self.expr.is_none() {
|
||||
self.expr = Some(e);
|
||||
}
|
||||
hir::intravisit::walk_expr(self, e);
|
||||
}
|
||||
}
|
||||
if let Some(body_id) = hir_map.maybe_body_owned_by(self.mir_def_id())
|
||||
&& let Block(block, _) = hir_map.body(body_id).value.kind
|
||||
{
|
||||
// `span` corresponds to the expression being iterated, find the `for`-loop desugared
|
||||
// expression with that span in order to identify potential fixes when encountering a
|
||||
// read-only iterator that should be mutable.
|
||||
let mut v = Finder {
|
||||
span,
|
||||
expr: None,
|
||||
};
|
||||
v.visit_block(block);
|
||||
if let Some(expr) = v.expr && let Call(_, [expr]) = expr.kind {
|
||||
match expr.kind {
|
||||
MethodCall(path_segment, _, _, span) => {
|
||||
// We have `for _ in iter.read_only_iter()`, try to
|
||||
// suggest `for _ in iter.mutable_iter()` instead.
|
||||
let opt_suggestions = self
|
||||
.infcx
|
||||
.tcx
|
||||
.typeck(path_segment.hir_id.owner.def_id)
|
||||
.type_dependent_def_id(expr.hir_id)
|
||||
.and_then(|def_id| self.infcx.tcx.impl_of_method(def_id))
|
||||
.map(|def_id| self.infcx.tcx.associated_items(def_id))
|
||||
.map(|assoc_items| {
|
||||
assoc_items
|
||||
.in_definition_order()
|
||||
.map(|assoc_item_def| assoc_item_def.ident(self.infcx.tcx))
|
||||
.filter(|&ident| {
|
||||
let original_method_ident = path_segment.ident;
|
||||
original_method_ident != ident
|
||||
&& ident.as_str().starts_with(
|
||||
&original_method_ident.name.to_string(),
|
||||
)
|
||||
})
|
||||
.map(|ident| format!("{ident}()"))
|
||||
.peekable()
|
||||
});
|
||||
|
||||
if let Some(mut suggestions) = opt_suggestions
|
||||
&& suggestions.peek().is_some()
|
||||
{
|
||||
err.span_suggestions(
|
||||
span,
|
||||
"use mutable method",
|
||||
suggestions,
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
}
|
||||
AddrOf(BorrowKind::Ref, Mutability::Not, expr) => {
|
||||
// We have `for _ in &i`, suggest `for _ in &mut i`.
|
||||
err.span_suggestion_verbose(
|
||||
expr.span.shrink_to_lo(),
|
||||
"use a mutable iterator instead",
|
||||
"mut ".to_string(),
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// Targeted error when encountering an `FnMut` closure where an `Fn` closure was expected.
|
||||
@ -951,6 +1028,44 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn suggest_using_iter_mut(&self, err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>) {
|
||||
let source = self.body.source;
|
||||
let hir = self.infcx.tcx.hir();
|
||||
if let InstanceDef::Item(def_id) = source.instance
|
||||
&& let Some(Node::Expr(hir::Expr { hir_id, kind, ..})) = hir.get_if_local(def_id)
|
||||
&& let ExprKind::Closure(closure) = kind && closure.movability == None
|
||||
&& let Some(Node::Expr(expr)) = hir.find_parent(*hir_id) {
|
||||
let mut cur_expr = expr;
|
||||
while let ExprKind::MethodCall(path_segment, recv, _, _) = cur_expr.kind {
|
||||
if path_segment.ident.name == sym::iter {
|
||||
// check `_ty` has `iter_mut` method
|
||||
let res = self
|
||||
.infcx
|
||||
.tcx
|
||||
.typeck(path_segment.hir_id.owner.def_id)
|
||||
.type_dependent_def_id(cur_expr.hir_id)
|
||||
.and_then(|def_id| self.infcx.tcx.impl_of_method(def_id))
|
||||
.map(|def_id| self.infcx.tcx.associated_items(def_id))
|
||||
.map(|assoc_items| {
|
||||
assoc_items.filter_by_name_unhygienic(sym::iter_mut).peekable()
|
||||
});
|
||||
|
||||
if let Some(mut res) = res && res.peek().is_some() {
|
||||
err.span_suggestion_verbose(
|
||||
path_segment.ident.span,
|
||||
"you may want to use `iter_mut` here",
|
||||
"iter_mut",
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
break;
|
||||
} else {
|
||||
cur_expr = recv;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn suggest_make_local_mut(
|
||||
&self,
|
||||
err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>,
|
||||
@ -1003,9 +1118,10 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
match opt_assignment_rhs_span.and_then(|s| s.desugaring_kind()) {
|
||||
// on for loops, RHS points to the iterator part
|
||||
Some(DesugaringKind::ForLoop) => {
|
||||
self.suggest_similar_mut_method_for_for_loop(err);
|
||||
let span = opt_assignment_rhs_span.unwrap();
|
||||
self.suggest_similar_mut_method_for_for_loop(err, span);
|
||||
err.span_label(
|
||||
opt_assignment_rhs_span.unwrap(),
|
||||
span,
|
||||
format!("this iterator yields `{pointer_sigil}` {pointer_desc}s",),
|
||||
);
|
||||
None
|
||||
|
||||
@ -245,7 +245,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
let Trait(PolyTraitRef { trait_ref, span: trait_span, .. }, _) = bound else { return; };
|
||||
diag.span_note(
|
||||
*trait_span,
|
||||
format!("due to current limitations in the borrow checker, this implies a `'static` lifetime")
|
||||
"due to current limitations in the borrow checker, this implies a `'static` lifetime"
|
||||
);
|
||||
let Some(generics_fn) = hir.get_generics(self.body.source.def_id().expect_local()) else { return; };
|
||||
let Def(_, trait_res_defid) = trait_ref.path.res else { return; };
|
||||
@ -277,7 +277,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
if suggestions.len() > 0 {
|
||||
suggestions.dedup();
|
||||
diag.multipart_suggestion_verbose(
|
||||
format!("consider restricting the type parameter to the `'static` lifetime"),
|
||||
"consider restricting the type parameter to the `'static` lifetime",
|
||||
suggestions,
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
|
||||
@ -27,7 +27,7 @@ pub(crate) struct RegionName {
|
||||
/// This helps to print the right kinds of diagnostics.
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) enum RegionNameSource {
|
||||
/// A bound (not free) region that was substituted at the def site (not an HRTB).
|
||||
/// A bound (not free) region that was instantiated at the def site (not an HRTB).
|
||||
NamedEarlyBoundRegion(Span),
|
||||
/// A free region that the user has a name (`'a`) for.
|
||||
NamedFreeRegion(Span),
|
||||
@ -302,7 +302,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
if free_region.bound_region.is_named() {
|
||||
// A named region that is actually named.
|
||||
Some(RegionName { name, source: RegionNameSource::NamedFreeRegion(span) })
|
||||
} else if let hir::IsAsync::Async = tcx.asyncness(self.mir_hir_id().owner) {
|
||||
} else if tcx.asyncness(self.mir_hir_id().owner).is_async() {
|
||||
// If we spuriously thought that the region is named, we should let the
|
||||
// system generate a true name for error messages. Currently this can
|
||||
// happen if we have an elided name in an async fn for example: the
|
||||
@ -354,7 +354,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
})
|
||||
}
|
||||
|
||||
ty::BoundRegionKind::BrAnon(..) => None,
|
||||
ty::BoundRegionKind::BrAnon => None,
|
||||
},
|
||||
|
||||
ty::ReLateBound(..)
|
||||
@ -442,8 +442,8 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
span: Span,
|
||||
counter: usize,
|
||||
) -> RegionNameHighlight {
|
||||
let mut highlight = RegionHighlightMode::new(self.infcx.tcx);
|
||||
highlight.highlighting_region_vid(needle_fr, counter);
|
||||
let mut highlight = RegionHighlightMode::default();
|
||||
highlight.highlighting_region_vid(self.infcx.tcx, needle_fr, counter);
|
||||
let type_name =
|
||||
self.infcx.extract_inference_diagnostics_data(ty.into(), Some(highlight)).name;
|
||||
|
||||
@ -516,7 +516,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
// be the same as those of the ADT.
|
||||
// FIXME: We should be able to do something similar to
|
||||
// match_adt_and_segment in this case.
|
||||
Res::Def(DefKind::TyAlias { .. }, _) => (),
|
||||
Res::Def(DefKind::TyAlias, _) => (),
|
||||
_ => {
|
||||
if let Some(last_segment) = path.segments.last() {
|
||||
if let Some(highlight) = self.match_adt_and_segment(
|
||||
@ -619,7 +619,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
// programs, so we need to use delay_span_bug here. See #82126.
|
||||
self.infcx.tcx.sess.delay_span_bug(
|
||||
hir_arg.span(),
|
||||
format!("unmatched subst and hir arg: found {kind:?} vs {hir_arg:?}"),
|
||||
format!("unmatched arg and hir arg: found {kind:?} vs {hir_arg:?}"),
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -804,8 +804,8 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut highlight = RegionHighlightMode::new(tcx);
|
||||
highlight.highlighting_region_vid(fr, *self.next_region_name.try_borrow().unwrap());
|
||||
let mut highlight = RegionHighlightMode::default();
|
||||
highlight.highlighting_region_vid(tcx, fr, *self.next_region_name.try_borrow().unwrap());
|
||||
let type_name =
|
||||
self.infcx.extract_inference_diagnostics_data(yield_ty.into(), Some(highlight)).name;
|
||||
|
||||
|
||||
@ -159,7 +159,9 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
|
||||
|
||||
self.mutate_place(location, *resume_arg, Deep);
|
||||
}
|
||||
TerminatorKind::Resume | TerminatorKind::Return | TerminatorKind::GeneratorDrop => {
|
||||
TerminatorKind::UnwindResume
|
||||
| TerminatorKind::Return
|
||||
| TerminatorKind::GeneratorDrop => {
|
||||
// Invalidate all borrows of local places
|
||||
let borrow_set = self.borrow_set;
|
||||
let start = self.location_table.start_index(location);
|
||||
@ -200,7 +202,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
|
||||
}
|
||||
}
|
||||
TerminatorKind::Goto { target: _ }
|
||||
| TerminatorKind::Terminate
|
||||
| TerminatorKind::UnwindTerminate(_)
|
||||
| TerminatorKind::Unreachable
|
||||
| TerminatorKind::FalseEdge { real_target: _, imaginary_target: _ }
|
||||
| TerminatorKind::FalseUnwind { real_target: _, unwind: _ } => {
|
||||
@ -251,8 +253,8 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
|
||||
match rvalue {
|
||||
&Rvalue::Ref(_ /*rgn*/, bk, place) => {
|
||||
let access_kind = match bk {
|
||||
BorrowKind::Shallow => {
|
||||
(Shallow(Some(ArtificialField::ShallowBorrow)), Read(ReadKind::Borrow(bk)))
|
||||
BorrowKind::Fake => {
|
||||
(Shallow(Some(ArtificialField::FakeBorrow)), Read(ReadKind::Borrow(bk)))
|
||||
}
|
||||
BorrowKind::Shared => (Deep, Read(ReadKind::Borrow(bk))),
|
||||
BorrowKind::Mut { .. } => {
|
||||
@ -374,8 +376,8 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
|
||||
// have already taken the reservation
|
||||
}
|
||||
|
||||
(Read(_), BorrowKind::Shallow | BorrowKind::Shared)
|
||||
| (Read(ReadKind::Borrow(BorrowKind::Shallow)), BorrowKind::Mut { .. }) => {
|
||||
(Read(_), BorrowKind::Fake | BorrowKind::Shared)
|
||||
| (Read(ReadKind::Borrow(BorrowKind::Fake)), BorrowKind::Mut { .. }) => {
|
||||
// Reads don't invalidate shared or shallow borrows
|
||||
}
|
||||
|
||||
@ -420,7 +422,7 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
|
||||
|
||||
// only mutable borrows should be 2-phase
|
||||
assert!(match borrow.kind {
|
||||
BorrowKind::Shared | BorrowKind::Shallow => false,
|
||||
BorrowKind::Shared | BorrowKind::Fake => false,
|
||||
BorrowKind::Mut { .. } => true,
|
||||
});
|
||||
|
||||
|
||||
@ -11,7 +11,7 @@
|
||||
#![feature(trusted_step)]
|
||||
#![feature(try_blocks)]
|
||||
#![recursion_limit = "256"]
|
||||
#![cfg_attr(not(bootstrap), allow(internal_features))]
|
||||
#![allow(internal_features)]
|
||||
|
||||
#[macro_use]
|
||||
extern crate rustc_middle;
|
||||
@ -603,7 +603,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
|
||||
|
||||
fn visit_statement_before_primary_effect(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
flow_state: &Flows<'cx, 'tcx>,
|
||||
stmt: &'cx Statement<'tcx>,
|
||||
location: Location,
|
||||
@ -673,7 +673,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
|
||||
|
||||
fn visit_terminator_before_primary_effect(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
flow_state: &Flows<'cx, 'tcx>,
|
||||
term: &'cx Terminator<'tcx>,
|
||||
loc: Location,
|
||||
@ -770,9 +770,9 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
|
||||
}
|
||||
|
||||
TerminatorKind::Goto { target: _ }
|
||||
| TerminatorKind::Terminate
|
||||
| TerminatorKind::UnwindTerminate(_)
|
||||
| TerminatorKind::Unreachable
|
||||
| TerminatorKind::Resume
|
||||
| TerminatorKind::UnwindResume
|
||||
| TerminatorKind::Return
|
||||
| TerminatorKind::GeneratorDrop
|
||||
| TerminatorKind::FalseEdge { real_target: _, imaginary_target: _ }
|
||||
@ -784,7 +784,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
|
||||
|
||||
fn visit_terminator_after_primary_effect(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
flow_state: &Flows<'cx, 'tcx>,
|
||||
term: &'cx Terminator<'tcx>,
|
||||
loc: Location,
|
||||
@ -803,7 +803,9 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
|
||||
}
|
||||
}
|
||||
|
||||
TerminatorKind::Resume | TerminatorKind::Return | TerminatorKind::GeneratorDrop => {
|
||||
TerminatorKind::UnwindResume
|
||||
| TerminatorKind::Return
|
||||
| TerminatorKind::GeneratorDrop => {
|
||||
// Returning from the function implicitly kills storage for all locals and statics.
|
||||
// Often, the storage will already have been killed by an explicit
|
||||
// StorageDead, but we don't always emit those (notably on unwind paths),
|
||||
@ -815,7 +817,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
|
||||
}
|
||||
}
|
||||
|
||||
TerminatorKind::Terminate
|
||||
TerminatorKind::UnwindTerminate(_)
|
||||
| TerminatorKind::Assert { .. }
|
||||
| TerminatorKind::Call { .. }
|
||||
| TerminatorKind::Drop { .. }
|
||||
@ -835,7 +837,7 @@ use self::ReadOrWrite::{Activation, Read, Reservation, Write};
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
enum ArtificialField {
|
||||
ArrayLength,
|
||||
ShallowBorrow,
|
||||
FakeBorrow,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
@ -1074,18 +1076,18 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
Control::Continue
|
||||
}
|
||||
|
||||
(Read(_), BorrowKind::Shared | BorrowKind::Shallow)
|
||||
| (Read(ReadKind::Borrow(BorrowKind::Shallow)), BorrowKind::Mut { .. }) => {
|
||||
(Read(_), BorrowKind::Shared | BorrowKind::Fake)
|
||||
| (Read(ReadKind::Borrow(BorrowKind::Fake)), BorrowKind::Mut { .. }) => {
|
||||
Control::Continue
|
||||
}
|
||||
|
||||
(Reservation(_), BorrowKind::Shallow | BorrowKind::Shared) => {
|
||||
(Reservation(_), BorrowKind::Fake | BorrowKind::Shared) => {
|
||||
// This used to be a future compatibility warning (to be
|
||||
// disallowed on NLL). See rust-lang/rust#56254
|
||||
Control::Continue
|
||||
}
|
||||
|
||||
(Write(WriteKind::Move), BorrowKind::Shallow) => {
|
||||
(Write(WriteKind::Move), BorrowKind::Fake) => {
|
||||
// Handled by initialization checks.
|
||||
Control::Continue
|
||||
}
|
||||
@ -1193,8 +1195,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
match rvalue {
|
||||
&Rvalue::Ref(_ /*rgn*/, bk, place) => {
|
||||
let access_kind = match bk {
|
||||
BorrowKind::Shallow => {
|
||||
(Shallow(Some(ArtificialField::ShallowBorrow)), Read(ReadKind::Borrow(bk)))
|
||||
BorrowKind::Fake => {
|
||||
(Shallow(Some(ArtificialField::FakeBorrow)), Read(ReadKind::Borrow(bk)))
|
||||
}
|
||||
BorrowKind::Shared => (Deep, Read(ReadKind::Borrow(bk))),
|
||||
BorrowKind::Mut { .. } => {
|
||||
@ -1215,7 +1217,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
flow_state,
|
||||
);
|
||||
|
||||
let action = if bk == BorrowKind::Shallow {
|
||||
let action = if bk == BorrowKind::Fake {
|
||||
InitializationRequiringAction::MatchOn
|
||||
} else {
|
||||
InitializationRequiringAction::Borrow
|
||||
@ -1567,7 +1569,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
|
||||
// only mutable borrows should be 2-phase
|
||||
assert!(match borrow.kind {
|
||||
BorrowKind::Shared | BorrowKind::Shallow => false,
|
||||
BorrowKind::Shared | BorrowKind::Fake => false,
|
||||
BorrowKind::Mut { .. } => true,
|
||||
});
|
||||
|
||||
@ -1801,6 +1803,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
for (place_base, elem) in place.iter_projections().rev() {
|
||||
match elem {
|
||||
ProjectionElem::Index(_/*operand*/) |
|
||||
ProjectionElem::Subtype(_) |
|
||||
ProjectionElem::OpaqueCast(_) |
|
||||
ProjectionElem::ConstantIndex { .. } |
|
||||
// assigning to P[i] requires P to be valid.
|
||||
@ -2000,14 +2003,14 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
| WriteKind::Replace
|
||||
| WriteKind::StorageDeadOrDrop
|
||||
| WriteKind::MutableBorrow(BorrowKind::Shared)
|
||||
| WriteKind::MutableBorrow(BorrowKind::Shallow),
|
||||
| WriteKind::MutableBorrow(BorrowKind::Fake),
|
||||
)
|
||||
| Write(
|
||||
WriteKind::Move
|
||||
| WriteKind::Replace
|
||||
| WriteKind::StorageDeadOrDrop
|
||||
| WriteKind::MutableBorrow(BorrowKind::Shared)
|
||||
| WriteKind::MutableBorrow(BorrowKind::Shallow),
|
||||
| WriteKind::MutableBorrow(BorrowKind::Fake),
|
||||
) => {
|
||||
if self.is_mutable(place.as_ref(), is_local_mutation_allowed).is_err()
|
||||
&& !self.has_buffered_errors()
|
||||
@ -2031,7 +2034,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
return false;
|
||||
}
|
||||
Read(
|
||||
ReadKind::Borrow(BorrowKind::Mut { .. } | BorrowKind::Shared | BorrowKind::Shallow)
|
||||
ReadKind::Borrow(BorrowKind::Mut { .. } | BorrowKind::Shared | BorrowKind::Fake)
|
||||
| ReadKind::Copy,
|
||||
) => {
|
||||
// Access authorized
|
||||
@ -2189,6 +2192,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
| ProjectionElem::Index(..)
|
||||
| ProjectionElem::ConstantIndex { .. }
|
||||
| ProjectionElem::Subslice { .. }
|
||||
| ProjectionElem::Subtype(..)
|
||||
| ProjectionElem::OpaqueCast { .. }
|
||||
| ProjectionElem::Downcast(..) => {
|
||||
let upvar_field_projection = self.is_upvar_field_projection(place);
|
||||
|
||||
@ -10,6 +10,7 @@ use rustc_middle::mir::{
|
||||
Body, ClosureOutlivesSubject, ClosureRegionRequirements, LocalKind, Location, Promoted,
|
||||
START_BLOCK,
|
||||
};
|
||||
use rustc_middle::ty::print::with_no_trimmed_paths;
|
||||
use rustc_middle::ty::{self, OpaqueHiddenType, TyCtxt};
|
||||
use rustc_span::symbol::sym;
|
||||
use std::env;
|
||||
@ -441,7 +442,10 @@ fn for_each_region_constraint<'tcx>(
|
||||
let subject = match req.subject {
|
||||
ClosureOutlivesSubject::Region(subject) => format!("{subject:?}"),
|
||||
ClosureOutlivesSubject::Ty(ty) => {
|
||||
format!("{:?}", ty.instantiate(tcx, |vid| ty::Region::new_var(tcx, vid)))
|
||||
with_no_trimmed_paths!(format!(
|
||||
"{}",
|
||||
ty.instantiate(tcx, |vid| ty::Region::new_var(tcx, vid))
|
||||
))
|
||||
}
|
||||
};
|
||||
with_msg(format!("where {}: {:?}", subject, req.outlived_free_region,))?;
|
||||
|
||||
@ -204,7 +204,7 @@ fn place_components_conflict<'tcx>(
|
||||
|
||||
match (elem, &base_ty.kind(), access) {
|
||||
(_, _, Shallow(Some(ArtificialField::ArrayLength)))
|
||||
| (_, _, Shallow(Some(ArtificialField::ShallowBorrow))) => {
|
||||
| (_, _, Shallow(Some(ArtificialField::FakeBorrow))) => {
|
||||
// The array length is like additional fields on the
|
||||
// type; it does not overlap any existing data there.
|
||||
// Furthermore, it cannot actually be a prefix of any
|
||||
@ -249,6 +249,7 @@ fn place_components_conflict<'tcx>(
|
||||
| (ProjectionElem::ConstantIndex { .. }, _, _)
|
||||
| (ProjectionElem::Subslice { .. }, _, _)
|
||||
| (ProjectionElem::OpaqueCast { .. }, _, _)
|
||||
| (ProjectionElem::Subtype(_), _, _)
|
||||
| (ProjectionElem::Downcast { .. }, _, _) => {
|
||||
// Recursive case. This can still be disjoint on a
|
||||
// further iteration if this a shallow access and
|
||||
@ -272,10 +273,10 @@ fn place_components_conflict<'tcx>(
|
||||
// In the second example, where we did, we still know
|
||||
// that the borrow can access a *part* of our place that
|
||||
// our access cares about, so we still have a conflict.
|
||||
if borrow_kind == BorrowKind::Shallow
|
||||
if borrow_kind == BorrowKind::Fake
|
||||
&& borrow_place.projection.len() < access_place.projection.len()
|
||||
{
|
||||
debug!("borrow_conflicts_with_place: shallow borrow");
|
||||
debug!("borrow_conflicts_with_place: fake borrow");
|
||||
false
|
||||
} else {
|
||||
debug!("borrow_conflicts_with_place: full borrow, CONFLICT");
|
||||
@ -508,6 +509,7 @@ fn place_projection_conflict<'tcx>(
|
||||
| ProjectionElem::Field(..)
|
||||
| ProjectionElem::Index(..)
|
||||
| ProjectionElem::ConstantIndex { .. }
|
||||
| ProjectionElem::Subtype(_)
|
||||
| ProjectionElem::OpaqueCast { .. }
|
||||
| ProjectionElem::Subslice { .. }
|
||||
| ProjectionElem::Downcast(..),
|
||||
|
||||
@ -89,6 +89,9 @@ impl<'cx, 'tcx> Iterator for Prefixes<'cx, 'tcx> {
|
||||
cursor = cursor_base;
|
||||
continue 'cursor;
|
||||
}
|
||||
ProjectionElem::Subtype(..) => {
|
||||
panic!("Subtype projection is not allowed before borrow check")
|
||||
}
|
||||
ProjectionElem::Deref => {
|
||||
// (handled below)
|
||||
}
|
||||
|
||||
@ -2249,7 +2249,14 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
}
|
||||
|
||||
pub(crate) fn universe_info(&self, universe: ty::UniverseIndex) -> UniverseInfo<'tcx> {
|
||||
self.universe_causes[&universe].clone()
|
||||
// Query canonicalization can create local superuniverses (for example in
|
||||
// `InferCtx::query_response_instantiation_guess`), but they don't have an associated
|
||||
// `UniverseInfo` explaining why they were created.
|
||||
// This can cause ICEs if these causes are accessed in diagnostics, for example in issue
|
||||
// #114907 where this happens via liveness and dropck outlives results.
|
||||
// Therefore, we return a default value in case that happens, which should at worst emit a
|
||||
// suboptimal error, instead of the ICE.
|
||||
self.universe_causes.get(&universe).cloned().unwrap_or_else(|| UniverseInfo::other())
|
||||
}
|
||||
|
||||
/// Tries to find the terminator of the loop in which the region 'r' resides.
|
||||
|
||||
@ -4,11 +4,10 @@ use crate::BorrowckInferCtxt;
|
||||
use rustc_index::IndexSlice;
|
||||
use rustc_infer::infer::NllRegionVariableOrigin;
|
||||
use rustc_middle::mir::visit::{MutVisitor, TyContext};
|
||||
use rustc_middle::mir::Constant;
|
||||
use rustc_middle::mir::{Body, Location, Promoted};
|
||||
use rustc_middle::mir::{Body, ConstOperand, Location, Promoted};
|
||||
use rustc_middle::ty::GenericArgsRef;
|
||||
use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable};
|
||||
use rustc_span::{Span, Symbol};
|
||||
use rustc_span::Symbol;
|
||||
|
||||
/// Replaces all free regions appearing in the MIR with fresh
|
||||
/// inference variables, returning the number of variables created.
|
||||
@ -29,21 +28,15 @@ pub fn renumber_mir<'tcx>(
|
||||
renumberer.visit_body(body);
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub(crate) enum BoundRegionInfo {
|
||||
Name(Symbol),
|
||||
Span(Span),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub(crate) enum RegionCtxt {
|
||||
Location(Location),
|
||||
TyContext(TyContext),
|
||||
Free(Symbol),
|
||||
Bound(BoundRegionInfo),
|
||||
LateBound(BoundRegionInfo),
|
||||
Bound(Symbol),
|
||||
LateBound(Symbol),
|
||||
Existential(Option<Symbol>),
|
||||
Placeholder(BoundRegionInfo),
|
||||
Placeholder(Symbol),
|
||||
Unknown,
|
||||
}
|
||||
|
||||
@ -117,9 +110,9 @@ impl<'a, 'tcx> MutVisitor<'tcx> for RegionRenumberer<'a, 'tcx> {
|
||||
}
|
||||
|
||||
#[instrument(skip(self), level = "debug")]
|
||||
fn visit_constant(&mut self, constant: &mut Constant<'tcx>, location: Location) {
|
||||
let literal = constant.literal;
|
||||
constant.literal = self.renumber_regions(literal, || RegionCtxt::Location(location));
|
||||
fn visit_constant(&mut self, constant: &mut ConstOperand<'tcx>, location: Location) {
|
||||
let const_ = constant.const_;
|
||||
constant.const_ = self.renumber_regions(const_, || RegionCtxt::Location(location));
|
||||
debug!("constant: {:#?}", constant);
|
||||
}
|
||||
}
|
||||
|
||||
@ -452,3 +452,10 @@ pub(crate) enum TypeNoCopy<'a, 'tcx> {
#[note(borrowck_ty_no_impl_copy)]
Note { is_partial_move: bool, ty: Ty<'tcx>, place: &'a str },
}

#[derive(Diagnostic)]
#[diag(borrowck_simd_shuffle_last_const)]
pub(crate) struct SimdShuffleLastConst {
#[primary_span]
pub span: Span,
}

@ -9,7 +9,7 @@ use rustc_span::Span;
|
||||
use rustc_trait_selection::traits::query::type_op::{self, TypeOpOutput};
|
||||
use rustc_trait_selection::traits::ObligationCause;
|
||||
|
||||
use crate::diagnostics::{ToUniverseInfo, UniverseInfo};
|
||||
use crate::diagnostics::ToUniverseInfo;
|
||||
|
||||
use super::{Locations, NormalizeLocation, TypeChecker};
|
||||
|
||||
@ -46,13 +46,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
self.push_region_constraints(locations, category, data);
|
||||
}
|
||||
|
||||
// If the query has created new universes and errors are going to be emitted, register the
|
||||
// cause of these new universes for improved diagnostics.
|
||||
let universe = self.infcx.universe();
|
||||
|
||||
if old_universe != universe {
|
||||
let universe_info = match error_info {
|
||||
Some(error_info) => error_info.to_universe_info(old_universe),
|
||||
None => UniverseInfo::other(),
|
||||
};
|
||||
if old_universe != universe && let Some(error_info) = error_info {
|
||||
let universe_info = error_info.to_universe_info(old_universe);
|
||||
for u in (old_universe + 1)..=universe {
|
||||
self.borrowck_context.constraints.universe_causes.insert(u, universe_info.clone());
|
||||
}
|
||||
@ -69,15 +67,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
where
|
||||
T: TypeFoldable<TyCtxt<'tcx>>,
|
||||
{
|
||||
let old_universe = self.infcx.universe();
|
||||
|
||||
let (instantiated, _) =
|
||||
self.infcx.instantiate_canonical_with_fresh_inference_vars(span, canonical);
|
||||
|
||||
for u in (old_universe + 1)..=self.infcx.universe() {
|
||||
self.borrowck_context.constraints.universe_causes.insert(u, UniverseInfo::other());
|
||||
}
|
||||
|
||||
instantiated
|
||||
}
|
||||
|
||||
|
||||
@ -50,7 +50,7 @@ use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
|
||||
use rustc_mir_dataflow::move_paths::MoveData;
|
||||
use rustc_mir_dataflow::ResultsCursor;
|
||||
|
||||
use crate::session_diagnostics::MoveUnsized;
|
||||
use crate::session_diagnostics::{MoveUnsized, SimdShuffleLastConst};
|
||||
use crate::{
|
||||
borrow_set::BorrowSet,
|
||||
constraints::{OutlivesConstraint, OutlivesConstraintSet},
|
||||
@ -163,10 +163,6 @@ pub(crate) fn type_check<'mir, 'tcx>(
|
||||
|
||||
debug!(?normalized_inputs_and_output);
|
||||
|
||||
for u in ty::UniverseIndex::ROOT..=infcx.universe() {
|
||||
constraints.universe_causes.insert(u, UniverseInfo::other());
|
||||
}
|
||||
|
||||
let mut borrowck_context = BorrowCheckContext {
|
||||
universal_regions,
|
||||
location_table,
|
||||
@ -306,11 +302,11 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
||||
self.sanitize_place(place, location, context);
|
||||
}
|
||||
|
||||
fn visit_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
|
||||
fn visit_constant(&mut self, constant: &ConstOperand<'tcx>, location: Location) {
|
||||
debug!(?constant, ?location, "visit_constant");
|
||||
|
||||
self.super_constant(constant, location);
|
||||
let ty = self.sanitize_type(constant, constant.literal.ty());
|
||||
let ty = self.sanitize_type(constant, constant.const_.ty());
|
||||
|
||||
self.cx.infcx.tcx.for_each_free_region(&ty, |live_region| {
|
||||
let live_region_vid =
|
||||
@ -332,7 +328,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
||||
|
||||
if let Some(annotation_index) = constant.user_ty {
|
||||
if let Err(terr) = self.cx.relate_type_and_user_type(
|
||||
constant.literal.ty(),
|
||||
constant.const_.ty(),
|
||||
ty::Variance::Invariant,
|
||||
&UserTypeProjection { base: annotation_index, projs: vec![] },
|
||||
locations,
|
||||
@ -344,20 +340,20 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
||||
constant,
|
||||
"bad constant user type {:?} vs {:?}: {:?}",
|
||||
annotation,
|
||||
constant.literal.ty(),
|
||||
constant.const_.ty(),
|
||||
terr,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
let tcx = self.tcx();
|
||||
let maybe_uneval = match constant.literal {
|
||||
ConstantKind::Ty(ct) => match ct.kind() {
|
||||
let maybe_uneval = match constant.const_ {
|
||||
Const::Ty(ct) => match ct.kind() {
|
||||
ty::ConstKind::Unevaluated(_) => {
|
||||
bug!("should not encounter unevaluated ConstantKind::Ty here, got {:?}", ct)
|
||||
bug!("should not encounter unevaluated Const::Ty here, got {:?}", ct)
|
||||
}
|
||||
_ => None,
|
||||
},
|
||||
ConstantKind::Unevaluated(uv, _) => Some(uv),
|
||||
Const::Unevaluated(uv, _) => Some(uv),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
@ -388,7 +384,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
||||
check_err(self, promoted_body, ty, promoted_ty);
|
||||
} else {
|
||||
self.cx.ascribe_user_type(
|
||||
constant.literal.ty(),
|
||||
constant.const_.ty(),
|
||||
UserType::TypeOf(uv.def, UserArgs { args: uv.args, user_self_ty: None }),
|
||||
locations.span(&self.cx.body),
|
||||
);
|
||||
@ -396,7 +392,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
||||
} else if let Some(static_def_id) = constant.check_static_ptr(tcx) {
|
||||
let unnormalized_ty = tcx.type_of(static_def_id).instantiate_identity();
|
||||
let normalized_ty = self.cx.normalize(unnormalized_ty, locations);
|
||||
let literal_ty = constant.literal.ty().builtin_deref(true).unwrap().ty;
|
||||
let literal_ty = constant.const_.ty().builtin_deref(true).unwrap().ty;
|
||||
|
||||
if let Err(terr) = self.cx.eq_types(
|
||||
literal_ty,
|
||||
@ -408,7 +404,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
if let ty::FnDef(def_id, args) = *constant.literal.ty().kind() {
|
||||
if let ty::FnDef(def_id, args) = *constant.const_.ty().kind() {
|
||||
let instantiated_predicates = tcx.predicates_of(def_id).instantiate(tcx, args);
|
||||
self.cx.normalize_and_prove_instantiated_predicates(
|
||||
def_id,
|
||||
@ -720,6 +716,9 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
||||
}
|
||||
PlaceTy::from_ty(fty)
|
||||
}
|
||||
ProjectionElem::Subtype(_) => {
|
||||
bug!("ProjectionElem::Subtype shouldn't exist in borrowck")
|
||||
}
|
||||
ProjectionElem::OpaqueCast(ty) => {
|
||||
let ty = self.sanitize_type(place, ty);
|
||||
let ty = self.cx.normalize(ty, location);
|
||||
@ -749,7 +748,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
||||
PlaceContext::MutatingUse(_) => ty::Invariant,
|
||||
PlaceContext::NonUse(StorageDead | StorageLive | VarDebugInfo) => ty::Invariant,
|
||||
PlaceContext::NonMutatingUse(
|
||||
Inspect | Copy | Move | PlaceMention | SharedBorrow | ShallowBorrow | AddressOf
|
||||
Inspect | Copy | Move | PlaceMention | SharedBorrow | FakeBorrow | AddressOf
|
||||
| Projection,
|
||||
) => ty::Covariant,
|
||||
PlaceContext::NonUse(AscribeUserTy(variance)) => variance,
|
||||
@ -1011,7 +1010,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
}
|
||||
|
||||
pub(super) fn register_predefined_opaques_in_new_solver(&mut self) {
|
||||
// OK to use the identity substitutions for each opaque type key, since
|
||||
// OK to use the identity arguments for each opaque type key, since
|
||||
// we remap opaques from HIR typeck back to their definition params.
|
||||
let opaques: Vec<_> = self
|
||||
.infcx
|
||||
@ -1333,8 +1332,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
debug!("terminator kind: {:?}", term.kind);
|
||||
match &term.kind {
|
||||
TerminatorKind::Goto { .. }
|
||||
| TerminatorKind::Resume
|
||||
| TerminatorKind::Terminate
|
||||
| TerminatorKind::UnwindResume
|
||||
| TerminatorKind::UnwindTerminate(_)
|
||||
| TerminatorKind::Return
|
||||
| TerminatorKind::GeneratorDrop
|
||||
| TerminatorKind::Unreachable
|
||||
@ -1371,14 +1370,13 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
}
|
||||
};
|
||||
let (sig, map) = tcx.replace_late_bound_regions(sig, |br| {
|
||||
use crate::renumber::{BoundRegionInfo, RegionCtxt};
|
||||
use crate::renumber::RegionCtxt;
|
||||
|
||||
let region_ctxt_fn = || {
|
||||
let reg_info = match br.kind {
|
||||
ty::BoundRegionKind::BrAnon(Some(span)) => BoundRegionInfo::Span(span),
|
||||
ty::BoundRegionKind::BrAnon(..) => BoundRegionInfo::Name(sym::anon),
|
||||
ty::BoundRegionKind::BrNamed(_, name) => BoundRegionInfo::Name(name),
|
||||
ty::BoundRegionKind::BrEnv => BoundRegionInfo::Name(sym::env),
|
||||
ty::BoundRegionKind::BrAnon => sym::anon,
|
||||
ty::BoundRegionKind::BrNamed(_, name) => name,
|
||||
ty::BoundRegionKind::BrEnv => sym::env,
|
||||
};
|
||||
|
||||
RegionCtxt::LateBound(reg_info)
|
||||
@ -1430,7 +1428,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
.add_element(region_vid, term_location);
|
||||
}
|
||||
|
||||
self.check_call_inputs(body, term, &sig, args, term_location, *call_source);
|
||||
self.check_call_inputs(body, term, func, &sig, args, term_location, *call_source);
|
||||
}
|
||||
TerminatorKind::Assert { cond, msg, .. } => {
|
||||
self.check_operand(cond, term_location);
|
||||
@ -1550,25 +1548,36 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
#[instrument(level = "debug", skip(self, body, term, func, term_location, call_source))]
|
||||
fn check_call_inputs(
|
||||
&mut self,
|
||||
body: &Body<'tcx>,
|
||||
term: &Terminator<'tcx>,
|
||||
func: &Operand<'tcx>,
|
||||
sig: &ty::FnSig<'tcx>,
|
||||
args: &[Operand<'tcx>],
|
||||
term_location: Location,
|
||||
call_source: CallSource,
|
||||
) {
|
||||
debug!("check_call_inputs({:?}, {:?})", sig, args);
|
||||
if args.len() < sig.inputs().len() || (args.len() > sig.inputs().len() && !sig.c_variadic) {
|
||||
span_mirbug!(self, term, "call to {:?} with wrong # of args", sig);
|
||||
}
|
||||
|
||||
let func_ty = if let TerminatorKind::Call { func, .. } = &term.kind {
|
||||
Some(func.ty(body, self.infcx.tcx))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let func_ty = func.ty(body, self.infcx.tcx);
|
||||
if let ty::FnDef(def_id, _) = *func_ty.kind() {
|
||||
if self.tcx().is_intrinsic(def_id) {
|
||||
match self.tcx().item_name(def_id) {
|
||||
sym::simd_shuffle => {
|
||||
if !matches!(args[2], Operand::Constant(_)) {
|
||||
self.tcx()
|
||||
.sess
|
||||
.emit_err(SimdShuffleLastConst { span: term.source_info.span });
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
debug!(?func_ty);
|
||||
|
||||
for (n, (fn_arg, op_arg)) in iter::zip(sig.inputs(), args).enumerate() {
|
||||
@ -1576,7 +1585,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
|
||||
let op_arg_ty = self.normalize(op_arg_ty, term_location);
|
||||
let category = if call_source.from_hir_call() {
|
||||
ConstraintCategory::CallArgument(self.infcx.tcx.erase_regions(func_ty))
|
||||
ConstraintCategory::CallArgument(Some(self.infcx.tcx.erase_regions(func_ty)))
|
||||
} else {
|
||||
ConstraintCategory::Boring
|
||||
};
|
||||
@ -1608,12 +1617,12 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
self.assert_iscleanup(body, block_data, *target, is_cleanup);
|
||||
}
|
||||
}
|
||||
TerminatorKind::Resume => {
|
||||
TerminatorKind::UnwindResume => {
|
||||
if !is_cleanup {
|
||||
span_mirbug!(self, block_data, "resume on non-cleanup block!")
|
||||
}
|
||||
}
|
||||
TerminatorKind::Terminate => {
|
||||
TerminatorKind::UnwindTerminate(_) => {
|
||||
if !is_cleanup {
|
||||
span_mirbug!(self, block_data, "abort on non-cleanup block!")
|
||||
}
|
||||
@ -1697,7 +1706,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
span_mirbug!(self, ctxt, "unwind on cleanup block")
|
||||
}
|
||||
}
|
||||
UnwindAction::Unreachable | UnwindAction::Terminate => (),
|
||||
UnwindAction::Unreachable | UnwindAction::Terminate(_) => (),
|
||||
}
|
||||
}
|
||||
|
||||
@ -1794,9 +1803,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
debug!(?op, ?location, "check_operand");
|
||||
|
||||
if let Operand::Constant(constant) = op {
|
||||
let maybe_uneval = match constant.literal {
|
||||
ConstantKind::Val(..) | ConstantKind::Ty(_) => None,
|
||||
ConstantKind::Unevaluated(uv, _) => Some(uv),
|
||||
let maybe_uneval = match constant.const_ {
|
||||
Const::Val(..) | Const::Ty(_) => None,
|
||||
Const::Unevaluated(uv, _) => Some(uv),
|
||||
};
|
||||
|
||||
if let Some(uv) = maybe_uneval {
|
||||
@ -2557,6 +2566,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
| ProjectionElem::Subslice { .. } => {
|
||||
// other field access
|
||||
}
|
||||
ProjectionElem::Subtype(_) => {
|
||||
bug!("ProjectionElem::Subtype shouldn't exist in borrowck")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -11,7 +11,7 @@ use rustc_span::{Span, Symbol};
|
||||
|
||||
use crate::constraints::OutlivesConstraint;
|
||||
use crate::diagnostics::UniverseInfo;
|
||||
use crate::renumber::{BoundRegionInfo, RegionCtxt};
|
||||
use crate::renumber::RegionCtxt;
|
||||
use crate::type_check::{InstantiateOpaqueType, Locations, TypeChecker};
|
||||
|
||||
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
@ -126,10 +126,9 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx>
|
||||
.placeholder_region(self.type_checker.infcx, placeholder);
|
||||
|
||||
let reg_info = match placeholder.bound.kind {
|
||||
ty::BoundRegionKind::BrAnon(Some(span)) => BoundRegionInfo::Span(span),
|
||||
ty::BoundRegionKind::BrAnon(..) => BoundRegionInfo::Name(sym::anon),
|
||||
ty::BoundRegionKind::BrNamed(_, name) => BoundRegionInfo::Name(name),
|
||||
ty::BoundRegionKind::BrEnv => BoundRegionInfo::Name(sym::env),
|
||||
ty::BoundRegionKind::BrAnon => sym::anon,
|
||||
ty::BoundRegionKind::BrNamed(_, name) => name,
|
||||
ty::BoundRegionKind::BrEnv => sym::env,
|
||||
};
|
||||
|
||||
if cfg!(debug_assertions) {
|
||||
|
||||
@ -21,13 +21,14 @@ use rustc_hir::BodyOwnerKind;
|
||||
use rustc_index::IndexVec;
|
||||
use rustc_infer::infer::NllRegionVariableOrigin;
|
||||
use rustc_middle::ty::fold::TypeFoldable;
|
||||
use rustc_middle::ty::print::with_no_trimmed_paths;
|
||||
use rustc_middle::ty::{self, InlineConstArgs, InlineConstArgsParts, RegionVid, Ty, TyCtxt};
|
||||
use rustc_middle::ty::{GenericArgs, GenericArgsRef};
|
||||
use rustc_span::symbol::{kw, sym};
|
||||
use rustc_span::Symbol;
|
||||
use std::iter;
|
||||
|
||||
use crate::renumber::{BoundRegionInfo, RegionCtxt};
|
||||
use crate::renumber::RegionCtxt;
|
||||
use crate::BorrowckInferCtxt;
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -332,10 +333,16 @@ impl<'tcx> UniversalRegions<'tcx> {
|
||||
pub(crate) fn annotate(&self, tcx: TyCtxt<'tcx>, err: &mut Diagnostic) {
|
||||
match self.defining_ty {
|
||||
DefiningTy::Closure(def_id, args) => {
|
||||
let v = with_no_trimmed_paths!(
|
||||
args[tcx.generics_of(def_id).parent_count..]
|
||||
.iter()
|
||||
.map(|arg| arg.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
err.note(format!(
|
||||
"defining type: {} with closure args {:#?}",
|
||||
"defining type: {} with closure args [\n {},\n]",
|
||||
tcx.def_path_str_with_args(def_id, args),
|
||||
&args[tcx.generics_of(def_id).parent_count..],
|
||||
v.join(",\n "),
|
||||
));
|
||||
|
||||
// FIXME: It'd be nice to print the late-bound regions
|
||||
@ -348,10 +355,16 @@ impl<'tcx> UniversalRegions<'tcx> {
|
||||
});
|
||||
}
|
||||
DefiningTy::Generator(def_id, args, _) => {
|
||||
let v = with_no_trimmed_paths!(
|
||||
args[tcx.generics_of(def_id).parent_count..]
|
||||
.iter()
|
||||
.map(|arg| arg.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
err.note(format!(
|
||||
"defining type: {} with generator args {:#?}",
|
||||
"defining type: {} with generator args [\n {},\n]",
|
||||
tcx.def_path_str_with_args(def_id, args),
|
||||
&args[tcx.generics_of(def_id).parent_count..],
|
||||
v.join(",\n "),
|
||||
));
|
||||
|
||||
// FIXME: As above, we'd like to print out the region
|
||||
@ -433,9 +446,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
|
||||
if !indices.indices.contains_key(&r) {
|
||||
let region_vid = {
|
||||
let name = r.get_name_or_anon();
|
||||
self.infcx.next_nll_region_var(FR, || {
|
||||
RegionCtxt::LateBound(BoundRegionInfo::Name(name))
|
||||
})
|
||||
self.infcx.next_nll_region_var(FR, || RegionCtxt::LateBound(name))
|
||||
};
|
||||
|
||||
debug!(?region_vid);
|
||||
@ -467,9 +478,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
|
||||
if !indices.indices.contains_key(&r) {
|
||||
let region_vid = {
|
||||
let name = r.get_name_or_anon();
|
||||
self.infcx.next_nll_region_var(FR, || {
|
||||
RegionCtxt::LateBound(BoundRegionInfo::Name(name))
|
||||
})
|
||||
self.infcx.next_nll_region_var(FR, || RegionCtxt::LateBound(name))
|
||||
};
|
||||
|
||||
debug!(?region_vid);
|
||||
@ -567,7 +576,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
BodyOwnerKind::Const | BodyOwnerKind::Static(..) => {
|
||||
BodyOwnerKind::Const { .. } | BodyOwnerKind::Static(..) => {
|
||||
let identity_args = GenericArgs::identity_for_item(tcx, typeck_root_def_id);
|
||||
if self.mir_def.to_def_id() == typeck_root_def_id {
|
||||
let args =
|
||||
@ -630,10 +639,9 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
|
||||
};
|
||||
|
||||
let global_mapping = iter::once((tcx.lifetimes.re_static, fr_static));
|
||||
let subst_mapping =
|
||||
iter::zip(identity_args.regions(), fr_args.regions().map(|r| r.as_var()));
|
||||
let arg_mapping = iter::zip(identity_args.regions(), fr_args.regions().map(|r| r.as_var()));
|
||||
|
||||
UniversalRegionIndices { indices: global_mapping.chain(subst_mapping).collect(), fr_static }
|
||||
UniversalRegionIndices { indices: global_mapping.chain(arg_mapping).collect(), fr_static }
|
||||
}
|
||||
|
||||
fn compute_inputs_and_output(
|
||||
@ -783,7 +791,7 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for BorrowckInferCtxt<'cx, 'tcx> {
|
||||
_ => sym::anon,
|
||||
};
|
||||
|
||||
self.next_nll_region_var(origin, || RegionCtxt::Bound(BoundRegionInfo::Name(name)))
|
||||
self.next_nll_region_var(origin, || RegionCtxt::Bound(name))
|
||||
};
|
||||
|
||||
indices.insert_late_bound_region(liberated_region, region_vid.as_var());
|
||||
@ -813,9 +821,7 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for BorrowckInferCtxt<'cx, 'tcx> {
|
||||
if !indices.indices.contains_key(&r) {
|
||||
let region_vid = {
|
||||
let name = r.get_name_or_anon();
|
||||
self.next_nll_region_var(FR, || {
|
||||
RegionCtxt::LateBound(BoundRegionInfo::Name(name))
|
||||
})
|
||||
self.next_nll_region_var(FR, || RegionCtxt::LateBound(name))
|
||||
};
|
||||
|
||||
debug!(?region_vid);
|
||||
@ -835,9 +841,7 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for BorrowckInferCtxt<'cx, 'tcx> {
|
||||
if !indices.indices.contains_key(&r) {
|
||||
let region_vid = {
|
||||
let name = r.get_name_or_anon();
|
||||
self.next_nll_region_var(FR, || {
|
||||
RegionCtxt::LateBound(BoundRegionInfo::Name(name))
|
||||
})
|
||||
self.next_nll_region_var(FR, || RegionCtxt::LateBound(name))
|
||||
};
|
||||
|
||||
indices.insert_late_bound_region(r, region_vid.as_var());
|
||||
|
||||
@ -137,6 +137,8 @@ builtin_macros_format_positional_after_named = positional arguments cannot follo
|
||||
.label = positional arguments must be before named arguments
|
||||
.named_args = named argument
|
||||
|
||||
builtin_macros_format_remove_raw_ident = remove the `r#`
|
||||
|
||||
builtin_macros_format_requires_string = requires at least a format string argument
|
||||
|
||||
builtin_macros_format_string_invalid = invalid format string: {$desc}
|
||||
@ -165,6 +167,8 @@ builtin_macros_format_unused_arg = {$named ->
|
||||
builtin_macros_format_unused_args = multiple unused formatting arguments
|
||||
.label = multiple missing formatting specifiers
|
||||
|
||||
builtin_macros_format_use_positional = consider using a positional formatting argument instead
|
||||
|
||||
builtin_macros_global_asm_clobber_abi = `clobber_abi` cannot be used with `global_asm!`
|
||||
|
||||
builtin_macros_invalid_crate_attribute = invalid crate attribute
|
||||
@ -205,8 +209,6 @@ builtin_macros_requires_cfg_pattern =
|
||||
|
||||
builtin_macros_should_panic = functions using `#[should_panic]` must return `()`
|
||||
|
||||
builtin_macros_sugg = consider using a positional formatting argument instead
|
||||
|
||||
builtin_macros_test_arg_non_lifetime = functions used as tests can not have any non-lifetime generic parameters
|
||||
|
||||
builtin_macros_test_args = functions used as tests can not have any arguments
|
||||
|
||||
@ -241,7 +241,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
||||
self.manage_cond_expr(prefix);
|
||||
self.manage_cond_expr(suffix);
|
||||
}
|
||||
ExprKind::Let(_, local_expr, _) => {
|
||||
ExprKind::Let(_, local_expr, _, _) => {
|
||||
self.manage_cond_expr(local_expr);
|
||||
}
|
||||
ExprKind::Match(local_expr, _) => {
|
||||
|
||||
@ -41,7 +41,7 @@ pub fn expand_deriving_const_param_ty(
|
||||
path: path_std!(marker::ConstParamTy),
|
||||
skip_path_as_bound: false,
|
||||
needs_copy_as_bound_if_packed: false,
|
||||
additional_bounds: Vec::new(),
|
||||
additional_bounds: vec![ty::Ty::Path(path_std!(cmp::Eq))],
|
||||
supports_unions: false,
|
||||
methods: Vec::new(),
|
||||
associated_types: Vec::new(),
|
||||
|
||||
@ -18,6 +18,20 @@ pub fn expand_deriving_eq(
|
||||
is_const: bool,
|
||||
) {
|
||||
let span = cx.with_def_site_ctxt(span);
|
||||
|
||||
let structural_trait_def = TraitDef {
|
||||
span,
|
||||
path: path_std!(marker::StructuralEq),
|
||||
skip_path_as_bound: true, // crucial!
|
||||
needs_copy_as_bound_if_packed: false,
|
||||
additional_bounds: Vec::new(),
|
||||
supports_unions: true,
|
||||
methods: Vec::new(),
|
||||
associated_types: Vec::new(),
|
||||
is_const: false,
|
||||
};
|
||||
structural_trait_def.expand(cx, mitem, item, push);
|
||||
|
||||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: path_std!(cmp::Eq),
|
||||
@ -34,7 +48,7 @@ pub fn expand_deriving_eq(
|
||||
attributes: thin_vec![
|
||||
cx.attr_word(sym::inline, span),
|
||||
cx.attr_nested_word(sym::doc, sym::hidden, span),
|
||||
cx.attr_word(sym::no_coverage, span)
|
||||
cx.attr_nested_word(sym::coverage, sym::off, span)
|
||||
],
|
||||
fieldless_variants_strategy: FieldlessVariantsStrategy::Unify,
|
||||
combine_substructure: combine_substructure(Box::new(|a, b, c| {
|
||||
@ -44,9 +58,6 @@ pub fn expand_deriving_eq(
|
||||
associated_types: Vec::new(),
|
||||
is_const,
|
||||
};
|
||||
|
||||
super::inject_impl_of_structural_trait(cx, span, item, path_std!(marker::StructuralEq), push);
|
||||
|
||||
trait_def.expand_ext(cx, mitem, item, push, true)
|
||||
}
|
||||
|
||||
|
||||
@ -72,13 +72,20 @@ pub fn expand_deriving_partial_eq(
|
||||
BlockOrExpr::new_expr(expr)
|
||||
}
|
||||
|
||||
super::inject_impl_of_structural_trait(
|
||||
cx,
|
||||
let structural_trait_def = TraitDef {
|
||||
span,
|
||||
item,
|
||||
path_std!(marker::StructuralPartialEq),
|
||||
push,
|
||||
);
|
||||
path: path_std!(marker::StructuralPartialEq),
|
||||
skip_path_as_bound: true, // crucial!
|
||||
needs_copy_as_bound_if_packed: false,
|
||||
additional_bounds: Vec::new(),
|
||||
// We really don't support unions, but that's already checked by the impl generated below;
|
||||
// a second check here would lead to redundant error messages.
|
||||
supports_unions: true,
|
||||
methods: Vec::new(),
|
||||
associated_types: Vec::new(),
|
||||
is_const: false,
|
||||
};
|
||||
structural_trait_def.expand(cx, mitem, item, push);
|
||||
|
||||
// No need to generate `ne`, the default suffices, and not generating it is
|
||||
// faster.
|
||||
|
||||
@ -88,7 +88,7 @@
|
||||
//!
|
||||
//! When generating the `expr` for the `A` impl, the `SubstructureFields` is
|
||||
//!
|
||||
//! ```{.text}
|
||||
//! ```text
|
||||
//! Struct(vec![FieldInfo {
|
||||
//! span: <span of x>
|
||||
//! name: Some(<ident of x>),
|
||||
@ -99,7 +99,7 @@
|
||||
//!
|
||||
//! For the `B` impl, called with `B(a)` and `B(b)`,
|
||||
//!
|
||||
//! ```{.text}
|
||||
//! ```text
|
||||
//! Struct(vec![FieldInfo {
|
||||
//! span: <span of `i32`>,
|
||||
//! name: None,
|
||||
@ -113,7 +113,7 @@
|
||||
//! When generating the `expr` for a call with `self == C0(a)` and `other
|
||||
//! == C0(b)`, the SubstructureFields is
|
||||
//!
|
||||
//! ```{.text}
|
||||
//! ```text
|
||||
//! EnumMatching(0, <ast::Variant for C0>,
|
||||
//! vec![FieldInfo {
|
||||
//! span: <span of i32>
|
||||
@ -125,7 +125,7 @@
|
||||
//!
|
||||
//! For `C1 {x}` and `C1 {x}`,
|
||||
//!
|
||||
//! ```{.text}
|
||||
//! ```text
|
||||
//! EnumMatching(1, <ast::Variant for C1>,
|
||||
//! vec![FieldInfo {
|
||||
//! span: <span of x>
|
||||
@ -137,7 +137,7 @@
|
||||
//!
|
||||
//! For the tags,
|
||||
//!
|
||||
//! ```{.text}
|
||||
//! ```text
|
||||
//! EnumTag(
|
||||
//! &[<ident of self tag>, <ident of other tag>], <expr to combine with>)
|
||||
//! ```
|
||||
@ -149,7 +149,7 @@
|
||||
//!
|
||||
//! A static method on the types above would result in,
|
||||
//!
|
||||
//! ```{.text}
|
||||
//! ```text
|
||||
//! StaticStruct(<ast::VariantData of A>, Named(vec![(<ident of x>, <span of x>)]))
|
||||
//!
|
||||
//! StaticStruct(<ast::VariantData of B>, Unnamed(vec![<span of x>]))
|
||||
@ -711,7 +711,9 @@ impl<'a> TraitDef<'a> {
|
||||
.collect();
|
||||
|
||||
// Require the current trait.
|
||||
bounds.push(cx.trait_bound(trait_path.clone(), self.is_const));
|
||||
if !self.skip_path_as_bound {
|
||||
bounds.push(cx.trait_bound(trait_path.clone(), self.is_const));
|
||||
}
|
||||
|
||||
// Add a `Copy` bound if required.
|
||||
if is_packed && self.needs_copy_as_bound_if_packed {
|
||||
@ -722,15 +724,17 @@ impl<'a> TraitDef<'a> {
|
||||
));
|
||||
}
|
||||
|
||||
let predicate = ast::WhereBoundPredicate {
|
||||
span: self.span,
|
||||
bound_generic_params: field_ty_param.bound_generic_params,
|
||||
bounded_ty: field_ty_param.ty,
|
||||
bounds,
|
||||
};
|
||||
if !bounds.is_empty() {
|
||||
let predicate = ast::WhereBoundPredicate {
|
||||
span: self.span,
|
||||
bound_generic_params: field_ty_param.bound_generic_params,
|
||||
bounded_ty: field_ty_param.ty,
|
||||
bounds,
|
||||
};
|
||||
|
||||
let predicate = ast::WherePredicate::BoundPredicate(predicate);
|
||||
where_clause.predicates.push(predicate);
|
||||
let predicate = ast::WherePredicate::BoundPredicate(predicate);
|
||||
where_clause.predicates.push(predicate);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2,9 +2,9 @@
|
||||
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::{GenericArg, Impl, ItemKind, MetaItem};
|
||||
use rustc_ast::{GenericArg, MetaItem};
|
||||
use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, MultiItemModifier};
|
||||
use rustc_span::symbol::{sym, Ident, Symbol};
|
||||
use rustc_span::symbol::{sym, Symbol};
|
||||
use rustc_span::Span;
|
||||
use thin_vec::{thin_vec, ThinVec};
|
||||
|
||||
@ -116,100 +116,6 @@ fn call_unreachable(cx: &ExtCtxt<'_>, span: Span) -> P<ast::Expr> {
|
||||
}))
|
||||
}
|
||||
|
||||
// Injects `impl<...> Structural for ItemType<...> { }`. In particular,
|
||||
// does *not* add `where T: Structural` for parameters `T` in `...`.
|
||||
// (That's the main reason we cannot use TraitDef here.)
|
||||
fn inject_impl_of_structural_trait(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
span: Span,
|
||||
item: &Annotatable,
|
||||
structural_path: generic::ty::Path,
|
||||
push: &mut dyn FnMut(Annotatable),
|
||||
) {
|
||||
let Annotatable::Item(item) = item else {
|
||||
unreachable!();
|
||||
};
|
||||
|
||||
let generics = match &item.kind {
|
||||
ItemKind::Struct(_, generics) | ItemKind::Enum(_, generics) => generics,
|
||||
// Do not inject `impl Structural for Union`. (`PartialEq` does not
|
||||
// support unions, so we will see error downstream.)
|
||||
ItemKind::Union(..) => return,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
// Create generics param list for where clauses and impl headers
|
||||
let mut generics = generics.clone();
|
||||
|
||||
let ctxt = span.ctxt();
|
||||
|
||||
// Create the type of `self`.
|
||||
//
|
||||
// in addition, remove defaults from generic params (impls cannot have them).
|
||||
let self_params: Vec<_> = generics
|
||||
.params
|
||||
.iter_mut()
|
||||
.map(|param| match &mut param.kind {
|
||||
ast::GenericParamKind::Lifetime => ast::GenericArg::Lifetime(
|
||||
cx.lifetime(param.ident.span.with_ctxt(ctxt), param.ident),
|
||||
),
|
||||
ast::GenericParamKind::Type { default } => {
|
||||
*default = None;
|
||||
ast::GenericArg::Type(cx.ty_ident(param.ident.span.with_ctxt(ctxt), param.ident))
|
||||
}
|
||||
ast::GenericParamKind::Const { ty: _, kw_span: _, default } => {
|
||||
*default = None;
|
||||
ast::GenericArg::Const(
|
||||
cx.const_ident(param.ident.span.with_ctxt(ctxt), param.ident),
|
||||
)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let type_ident = item.ident;
|
||||
|
||||
let trait_ref = cx.trait_ref(structural_path.to_path(cx, span, type_ident, &generics));
|
||||
let self_type = cx.ty_path(cx.path_all(span, false, vec![type_ident], self_params));
|
||||
|
||||
// It would be nice to also encode constraint `where Self: Eq` (by adding it
|
||||
// onto `generics` cloned above). Unfortunately, that strategy runs afoul of
|
||||
// rust-lang/rust#48214. So we perform that additional check in the compiler
|
||||
// itself, instead of encoding it here.
|
||||
|
||||
// Keep the lint and stability attributes of the original item, to control
|
||||
// how the generated implementation is linted.
|
||||
let mut attrs = ast::AttrVec::new();
|
||||
attrs.extend(
|
||||
item.attrs
|
||||
.iter()
|
||||
.filter(|a| {
|
||||
[sym::allow, sym::warn, sym::deny, sym::forbid, sym::stable, sym::unstable]
|
||||
.contains(&a.name_or_empty())
|
||||
})
|
||||
.cloned(),
|
||||
);
|
||||
// Mark as `automatically_derived` to avoid some silly lints.
|
||||
attrs.push(cx.attr_word(sym::automatically_derived, span));
|
||||
|
||||
let newitem = cx.item(
|
||||
span,
|
||||
Ident::empty(),
|
||||
attrs,
|
||||
ItemKind::Impl(Box::new(Impl {
|
||||
unsafety: ast::Unsafe::No,
|
||||
polarity: ast::ImplPolarity::Positive,
|
||||
defaultness: ast::Defaultness::Final,
|
||||
constness: ast::Const::No,
|
||||
generics,
|
||||
of_trait: Some(trait_ref),
|
||||
self_ty: self_type,
|
||||
items: ThinVec::new(),
|
||||
})),
|
||||
);
|
||||
|
||||
push(Annotatable::Item(newitem));
|
||||
}
|
||||
|
||||
fn assert_ty_bounds(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
stmts: &mut ThinVec<ast::Stmt>,
|
||||
|
||||
@ -539,18 +539,29 @@ pub(crate) struct InvalidFormatStringLabel {
|
||||
}
|
||||
|
||||
#[derive(Subdiagnostic)]
|
||||
#[multipart_suggestion(
|
||||
builtin_macros_sugg,
|
||||
style = "verbose",
|
||||
applicability = "machine-applicable"
|
||||
)]
|
||||
pub(crate) struct InvalidFormatStringSuggestion {
|
||||
#[suggestion_part(code = "{len}")]
|
||||
pub(crate) captured: Span,
|
||||
pub(crate) len: String,
|
||||
#[suggestion_part(code = ", {arg}")]
|
||||
pub(crate) span: Span,
|
||||
pub(crate) arg: String,
|
||||
pub(crate) enum InvalidFormatStringSuggestion {
|
||||
#[multipart_suggestion(
|
||||
builtin_macros_format_use_positional,
|
||||
style = "verbose",
|
||||
applicability = "machine-applicable"
|
||||
)]
|
||||
UsePositional {
|
||||
#[suggestion_part(code = "{len}")]
|
||||
captured: Span,
|
||||
len: String,
|
||||
#[suggestion_part(code = ", {arg}")]
|
||||
span: Span,
|
||||
arg: String,
|
||||
},
|
||||
#[suggestion(
|
||||
builtin_macros_format_remove_raw_ident,
|
||||
code = "",
|
||||
applicability = "machine-applicable"
|
||||
)]
|
||||
RemoveRawIdent {
|
||||
#[primary_span]
|
||||
span: Span,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
|
||||
@ -260,20 +260,29 @@ fn make_format_args(
|
||||
if let Some((label, span)) = err.secondary_label && is_source_literal {
|
||||
e.label_ = Some(errors::InvalidFormatStringLabel { span: fmt_span.from_inner(InnerSpan::new(span.start, span.end)), label } );
|
||||
}
|
||||
if err.should_be_replaced_with_positional_argument {
|
||||
let captured_arg_span =
|
||||
fmt_span.from_inner(InnerSpan::new(err.span.start, err.span.end));
|
||||
if let Ok(arg) = ecx.source_map().span_to_snippet(captured_arg_span) {
|
||||
let span = match args.unnamed_args().last() {
|
||||
Some(arg) => arg.expr.span,
|
||||
None => fmt_span,
|
||||
};
|
||||
e.sugg_ = Some(errors::InvalidFormatStringSuggestion {
|
||||
captured: captured_arg_span,
|
||||
len: args.unnamed_args().len().to_string(),
|
||||
span: span.shrink_to_hi(),
|
||||
arg,
|
||||
});
|
||||
match err.suggestion {
|
||||
parse::Suggestion::None => {}
|
||||
parse::Suggestion::UsePositional => {
|
||||
let captured_arg_span =
|
||||
fmt_span.from_inner(InnerSpan::new(err.span.start, err.span.end));
|
||||
if let Ok(arg) = ecx.source_map().span_to_snippet(captured_arg_span) {
|
||||
let span = match args.unnamed_args().last() {
|
||||
Some(arg) => arg.expr.span,
|
||||
None => fmt_span,
|
||||
};
|
||||
e.sugg_ = Some(errors::InvalidFormatStringSuggestion::UsePositional {
|
||||
captured: captured_arg_span,
|
||||
len: args.unnamed_args().len().to_string(),
|
||||
span: span.shrink_to_hi(),
|
||||
arg,
|
||||
});
|
||||
}
|
||||
}
|
||||
parse::Suggestion::RemoveRawIdent(span) => {
|
||||
if is_source_literal {
|
||||
let span = fmt_span.from_inner(InnerSpan::new(span.start, span.end));
|
||||
e.sugg_ = Some(errors::InvalidFormatStringSuggestion::RemoveRawIdent { span })
|
||||
}
|
||||
}
|
||||
}
|
||||
ecx.emit_err(e);
|
||||
|
||||
@ -217,7 +217,7 @@ pub fn expand_include_bytes(
|
||||
};
|
||||
match cx.source_map().load_binary_file(&file) {
|
||||
Ok(bytes) => {
|
||||
let expr = cx.expr(sp, ast::ExprKind::IncludedBytes(bytes.into()));
|
||||
let expr = cx.expr(sp, ast::ExprKind::IncludedBytes(bytes));
|
||||
base::MacEager::expr(expr)
|
||||
}
|
||||
Err(e) => {
|
||||
|
||||
@ -254,7 +254,7 @@ fn generate_test_harness(
|
||||
let expn_id = ext_cx.resolver.expansion_for_ast_pass(
|
||||
DUMMY_SP,
|
||||
AstPass::TestHarness,
|
||||
&[sym::test, sym::rustc_attrs, sym::no_coverage],
|
||||
&[sym::test, sym::rustc_attrs, sym::coverage_attribute],
|
||||
None,
|
||||
);
|
||||
let def_site = DUMMY_SP.with_def_site_ctxt(expn_id.to_expn_id());
|
||||
@ -335,8 +335,8 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
|
||||
|
||||
// #[rustc_main]
|
||||
let main_attr = ecx.attr_word(sym::rustc_main, sp);
|
||||
// #[no_coverage]
|
||||
let no_coverage_attr = ecx.attr_word(sym::no_coverage, sp);
|
||||
// #[coverage(off)]
|
||||
let coverage_attr = ecx.attr_nested_word(sym::coverage, sym::off, sp);
|
||||
|
||||
// pub fn main() { ... }
|
||||
let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(ThinVec::new()));
|
||||
@ -366,7 +366,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
|
||||
|
||||
let main = P(ast::Item {
|
||||
ident: main_id,
|
||||
attrs: thin_vec![main_attr, no_coverage_attr],
|
||||
attrs: thin_vec![main_attr, coverage_attr],
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
kind: main,
|
||||
vis: ast::Visibility { span: sp, kind: ast::VisibilityKind::Public, tokens: None },
|
||||
|
||||
@ -54,7 +54,7 @@ These are a few functions that allow you to easily run rust code from the shell
|
||||
|
||||
```bash
|
||||
function jit_naked() {
|
||||
echo "$@" | $cg_clif_dir/dist/rustc-clif - -Zunstable-features -Cllvm-args=mode=jit -Cprefer-dynamic
|
||||
echo "$@" | $cg_clif_dir/dist/rustc-clif - -Zunstable-options -Cllvm-args=mode=jit-lazy -Cprefer-dynamic
|
||||
}
|
||||
|
||||
function jit() {
|
||||
|
||||
@ -4,9 +4,9 @@ version = 3
|
||||
|
||||
[[package]]
|
||||
name = "addr2line"
|
||||
version = "0.20.0"
|
||||
version = "0.21.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3"
|
||||
checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
|
||||
dependencies = [
|
||||
"compiler_builtins",
|
||||
"gimli",
|
||||
@ -140,9 +140,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gimli"
|
||||
version = "0.27.2"
|
||||
version = "0.28.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4"
|
||||
checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0"
|
||||
dependencies = [
|
||||
"compiler_builtins",
|
||||
"rustc-std-workspace-alloc",
|
||||
@ -205,9 +205,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "object"
|
||||
version = "0.31.1"
|
||||
version = "0.32.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1"
|
||||
checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe"
|
||||
dependencies = [
|
||||
"compiler_builtins",
|
||||
"memchr",
|
||||
|
||||
@ -1,3 +1,3 @@
|
||||
[toolchain]
|
||||
channel = "nightly-2023-08-08"
|
||||
channel = "nightly-2023-09-06"
|
||||
components = ["rust-src", "rustc-dev", "llvm-tools"]
|
||||
|
||||
@ -100,9 +100,9 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
stack = &stack[..index + ENCODE_METADATA.len()];
|
||||
}
|
||||
|
||||
const SUBST_AND_NORMALIZE_ERASING_REGIONS: &str = "rustc_middle::ty::normalize_erasing_regions::<impl rustc_middle::ty::context::TyCtxt>::subst_and_normalize_erasing_regions";
|
||||
if let Some(index) = stack.find(SUBST_AND_NORMALIZE_ERASING_REGIONS) {
|
||||
stack = &stack[..index + SUBST_AND_NORMALIZE_ERASING_REGIONS.len()];
|
||||
const INSTANTIATE_AND_NORMALIZE_ERASING_REGIONS: &str = "rustc_middle::ty::normalize_erasing_regions::<impl rustc_middle::ty::context::TyCtxt>::instantiate_and_normalize_erasing_regions";
|
||||
if let Some(index) = stack.find(INSTANTIATE_AND_NORMALIZE_ERASING_REGIONS) {
|
||||
stack = &stack[..index + INSTANTIATE_AND_NORMALIZE_ERASING_REGIONS.len()];
|
||||
}
|
||||
|
||||
const NORMALIZE_ERASING_LATE_BOUND_REGIONS: &str = "rustc_middle::ty::normalize_erasing_regions::<impl rustc_middle::ty::context::TyCtxt>::normalize_erasing_late_bound_regions";
|
||||
|
||||
@ -45,6 +45,7 @@ rm tests/ui/proc-macro/quote-debug.rs
|
||||
rm tests/ui/proc-macro/no-missing-docs.rs
|
||||
rm tests/ui/rust-2018/proc-macro-crate-in-paths.rs
|
||||
rm tests/ui/proc-macro/allowed-signatures.rs
|
||||
rm tests/ui/proc-macro/no-mangle-in-proc-macro-issue-111888.rs
|
||||
|
||||
# vendor intrinsics
|
||||
rm tests/ui/sse2.rs # cpuid not supported, so sse2 not detected
|
||||
@ -114,6 +115,7 @@ rm tests/ui/mir/mir_misc_casts.rs # depends on deduplication of constants
|
||||
rm tests/ui/mir/mir_raw_fat_ptr.rs # same
|
||||
rm tests/ui/consts/issue-33537.rs # same
|
||||
rm tests/ui/layout/valid_range_oob.rs # different ICE message
|
||||
rm tests/ui/const-generics/generic_const_exprs/issue-80742.rs # gives error instead of ICE with cg_clif
|
||||
|
||||
rm tests/ui/consts/issue-miri-1910.rs # different error message
|
||||
rm tests/ui/consts/offset_ub.rs # same
|
||||
|
||||
@ -39,7 +39,7 @@ fn clif_sig_from_fn_abi<'tcx>(
|
||||
pub(crate) fn conv_to_call_conv(sess: &Session, c: Conv, default_call_conv: CallConv) -> CallConv {
|
||||
match c {
|
||||
Conv::Rust | Conv::C => default_call_conv,
|
||||
Conv::RustCold => CallConv::Cold,
|
||||
Conv::Cold | Conv::PreserveMost | Conv::PreserveAll => CallConv::Cold,
|
||||
Conv::X86_64SysV => CallConv::SystemV,
|
||||
Conv::X86_64Win64 => CallConv::WindowsFastcall,
|
||||
|
||||
|
||||
@ -100,11 +100,11 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
}
|
||||
_ => unreachable!("{:?}", self.layout.abi),
|
||||
},
|
||||
PassMode::Cast(ref cast, pad_i32) => {
|
||||
PassMode::Cast { ref cast, pad_i32 } => {
|
||||
assert!(!pad_i32, "padding support not yet implemented");
|
||||
cast_target_to_abi_params(cast)
|
||||
}
|
||||
PassMode::Indirect { attrs, extra_attrs: None, on_stack } => {
|
||||
PassMode::Indirect { attrs, meta_attrs: None, on_stack } => {
|
||||
if on_stack {
|
||||
// Abi requires aligning struct size to pointer size
|
||||
let size = self.layout.size.align_to(tcx.data_layout.pointer_align.abi);
|
||||
@ -117,11 +117,11 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
smallvec![apply_arg_attrs_to_abi_param(AbiParam::new(pointer_ty(tcx)), attrs)]
|
||||
}
|
||||
}
|
||||
PassMode::Indirect { attrs, extra_attrs: Some(extra_attrs), on_stack } => {
|
||||
PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => {
|
||||
assert!(!on_stack);
|
||||
smallvec![
|
||||
apply_arg_attrs_to_abi_param(AbiParam::new(pointer_ty(tcx)), attrs),
|
||||
apply_arg_attrs_to_abi_param(AbiParam::new(pointer_ty(tcx)), extra_attrs),
|
||||
apply_arg_attrs_to_abi_param(AbiParam::new(pointer_ty(tcx)), meta_attrs),
|
||||
]
|
||||
}
|
||||
}
|
||||
@ -148,14 +148,14 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
}
|
||||
_ => unreachable!("{:?}", self.layout.abi),
|
||||
},
|
||||
PassMode::Cast(ref cast, _) => {
|
||||
PassMode::Cast { ref cast, .. } => {
|
||||
(None, cast_target_to_abi_params(cast).into_iter().collect())
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: None, on_stack } => {
|
||||
assert!(!on_stack);
|
||||
(Some(AbiParam::special(pointer_ty(tcx), ArgumentPurpose::StructReturn)), vec![])
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
|
||||
unreachable!("unsized return value")
|
||||
}
|
||||
}
|
||||
@ -229,7 +229,7 @@ pub(super) fn adjust_arg_for_abi<'tcx>(
|
||||
let (a, b) = arg.load_scalar_pair(fx);
|
||||
smallvec![a, b]
|
||||
}
|
||||
PassMode::Cast(ref cast, _) => to_casted_value(fx, arg, cast),
|
||||
PassMode::Cast { ref cast, .. } => to_casted_value(fx, arg, cast),
|
||||
PassMode::Indirect { .. } => {
|
||||
if is_owned {
|
||||
match arg.force_stack(fx) {
|
||||
@ -287,14 +287,14 @@ pub(super) fn cvalue_for_param<'tcx>(
|
||||
assert_eq!(block_params.len(), 2, "{:?}", block_params);
|
||||
Some(CValue::by_val_pair(block_params[0], block_params[1], arg_abi.layout))
|
||||
}
|
||||
PassMode::Cast(ref cast, _) => {
|
||||
PassMode::Cast { ref cast, .. } => {
|
||||
Some(from_casted_value(fx, &block_params, arg_abi.layout, cast))
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
|
||||
assert_eq!(block_params.len(), 1, "{:?}", block_params);
|
||||
Some(CValue::by_ref(Pointer::new(block_params[0]), arg_abi.layout))
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
|
||||
assert_eq!(block_params.len(), 2, "{:?}", block_params);
|
||||
Some(CValue::by_ref_unsized(
|
||||
Pointer::new(block_params[0]),
|
||||
|
||||
@ -13,7 +13,7 @@ pub(super) fn codegen_return_param<'tcx>(
|
||||
block_params_iter: &mut impl Iterator<Item = Value>,
|
||||
) -> CPlace<'tcx> {
|
||||
let (ret_place, ret_param): (_, SmallVec<[_; 2]>) = match fx.fn_abi.as_ref().unwrap().ret.mode {
|
||||
PassMode::Ignore | PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(..) => {
|
||||
PassMode::Ignore | PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast { .. } => {
|
||||
let is_ssa =
|
||||
ssa_analyzed[RETURN_PLACE].is_ssa(fx, fx.fn_abi.as_ref().unwrap().ret.layout.ty);
|
||||
(
|
||||
@ -26,7 +26,7 @@ pub(super) fn codegen_return_param<'tcx>(
|
||||
smallvec![],
|
||||
)
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
|
||||
let ret_param = block_params_iter.next().unwrap();
|
||||
assert_eq!(fx.bcx.func.dfg.value_type(ret_param), fx.pointer_type);
|
||||
(
|
||||
@ -34,7 +34,7 @@ pub(super) fn codegen_return_param<'tcx>(
|
||||
smallvec![ret_param],
|
||||
)
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
|
||||
unreachable!("unsized return value")
|
||||
}
|
||||
};
|
||||
@ -62,7 +62,7 @@ pub(super) fn codegen_with_call_return_arg<'tcx>(
|
||||
) {
|
||||
let (ret_temp_place, return_ptr) = match ret_arg_abi.mode {
|
||||
PassMode::Ignore => (None, None),
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
|
||||
if let Some(ret_ptr) = ret_place.try_to_ptr() {
|
||||
// This is an optimization to prevent unnecessary copies of the return value when
|
||||
// the return place is already a memory place as opposed to a register.
|
||||
@ -73,10 +73,10 @@ pub(super) fn codegen_with_call_return_arg<'tcx>(
|
||||
(Some(place), Some(place.to_ptr().get_addr(fx)))
|
||||
}
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
|
||||
unreachable!("unsized return value")
|
||||
}
|
||||
PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(..) => (None, None),
|
||||
PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast { .. } => (None, None),
|
||||
};
|
||||
|
||||
let call_inst = f(fx, return_ptr);
|
||||
@ -93,21 +93,21 @@ pub(super) fn codegen_with_call_return_arg<'tcx>(
|
||||
ret_place
|
||||
.write_cvalue(fx, CValue::by_val_pair(ret_val_a, ret_val_b, ret_arg_abi.layout));
|
||||
}
|
||||
PassMode::Cast(ref cast, _) => {
|
||||
PassMode::Cast { ref cast, .. } => {
|
||||
let results =
|
||||
fx.bcx.inst_results(call_inst).iter().copied().collect::<SmallVec<[Value; 2]>>();
|
||||
let result =
|
||||
super::pass_mode::from_casted_value(fx, &results, ret_place.layout(), cast);
|
||||
ret_place.write_cvalue(fx, result);
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
|
||||
if let Some(ret_temp_place) = ret_temp_place {
|
||||
// If ret_temp_place is None, it is not necessary to copy the return value.
|
||||
let ret_temp_value = ret_temp_place.to_cvalue(fx);
|
||||
ret_place.write_cvalue(fx, ret_temp_value);
|
||||
}
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
|
||||
unreachable!("unsized return value")
|
||||
}
|
||||
}
|
||||
@ -116,10 +116,10 @@ pub(super) fn codegen_with_call_return_arg<'tcx>(
|
||||
/// Codegen a return instruction with the right return value(s) if any.
|
||||
pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, '_>) {
|
||||
match fx.fn_abi.as_ref().unwrap().ret.mode {
|
||||
PassMode::Ignore | PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
|
||||
PassMode::Ignore | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
|
||||
fx.bcx.ins().return_(&[]);
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
|
||||
unreachable!("unsized return value")
|
||||
}
|
||||
PassMode::Direct(_) => {
|
||||
@ -132,7 +132,7 @@ pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, '_>) {
|
||||
let (ret_val_a, ret_val_b) = place.to_cvalue(fx).load_scalar_pair(fx);
|
||||
fx.bcx.ins().return_(&[ret_val_a, ret_val_b]);
|
||||
}
|
||||
PassMode::Cast(ref cast, _) => {
|
||||
PassMode::Cast { ref cast, .. } => {
|
||||
let place = fx.get_local_place(RETURN_PLACE);
|
||||
let ret_val = place.to_cvalue(fx);
|
||||
let ret_vals = super::pass_mode::to_casted_value(fx, ret_val, cast);
|
||||
|
||||
@ -250,7 +250,10 @@ pub(crate) fn verify_func(
|
||||
}
|
||||
|
||||
fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) {
|
||||
if !crate::constant::check_constants(fx) {
|
||||
if let Err(err) =
|
||||
fx.mir.post_mono_checks(fx.tcx, ty::ParamEnv::reveal_all(), |c| Ok(fx.monomorphize(c)))
|
||||
{
|
||||
err.emit_err(fx.tcx);
|
||||
fx.bcx.append_block_params_for_function_params(fx.block_map[START_BLOCK]);
|
||||
fx.bcx.switch_to_block(fx.block_map[START_BLOCK]);
|
||||
// compilation should have been aborted
|
||||
@ -474,10 +477,10 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) {
|
||||
*destination,
|
||||
);
|
||||
}
|
||||
TerminatorKind::Terminate => {
|
||||
codegen_panic_cannot_unwind(fx, source_info);
|
||||
TerminatorKind::UnwindTerminate(reason) => {
|
||||
codegen_unwind_terminate(fx, source_info, *reason);
|
||||
}
|
||||
TerminatorKind::Resume => {
|
||||
TerminatorKind::UnwindResume => {
|
||||
// FIXME implement unwinding
|
||||
fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);
|
||||
}
|
||||
@ -723,11 +726,8 @@ fn codegen_stmt<'tcx>(
|
||||
}
|
||||
Rvalue::Repeat(ref operand, times) => {
|
||||
let operand = codegen_operand(fx, operand);
|
||||
let times = fx
|
||||
.monomorphize(times)
|
||||
.eval(fx.tcx, ParamEnv::reveal_all())
|
||||
.try_to_bits(fx.tcx.data_layout.pointer_size)
|
||||
.unwrap();
|
||||
let times =
|
||||
fx.monomorphize(times).eval_target_usize(fx.tcx, ParamEnv::reveal_all());
|
||||
if operand.layout().size.bytes() == 0 {
|
||||
// Do nothing for ZST's
|
||||
} else if fx.clif_type(operand.layout().ty) == Some(types::I8) {
|
||||
@ -875,7 +875,8 @@ pub(crate) fn codegen_place<'tcx>(
|
||||
PlaceElem::Deref => {
|
||||
cplace = cplace.place_deref(fx);
|
||||
}
|
||||
PlaceElem::OpaqueCast(ty) => cplace = cplace.place_opaque_cast(fx, ty),
|
||||
PlaceElem::OpaqueCast(ty) => bug!("encountered OpaqueCast({ty}) in codegen"),
|
||||
PlaceElem::Subtype(ty) => cplace = cplace.place_transmute_type(fx, fx.monomorphize(ty)),
|
||||
PlaceElem::Field(field, _ty) => {
|
||||
cplace = cplace.place_field(fx, field);
|
||||
}
|
||||
@ -971,13 +972,14 @@ pub(crate) fn codegen_panic_nounwind<'tcx>(
|
||||
codegen_panic_inner(fx, rustc_hir::LangItem::PanicNounwind, &args, source_info.span);
|
||||
}
|
||||
|
||||
pub(crate) fn codegen_panic_cannot_unwind<'tcx>(
|
||||
pub(crate) fn codegen_unwind_terminate<'tcx>(
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
source_info: mir::SourceInfo,
|
||||
reason: UnwindTerminateReason,
|
||||
) {
|
||||
let args = [];
|
||||
|
||||
codegen_panic_inner(fx, rustc_hir::LangItem::PanicCannotUnwind, &args, source_info.span);
|
||||
codegen_panic_inner(fx, reason.lang_item(), &args, source_info.span);
|
||||
}
|
||||
|
||||
fn codegen_panic_inner<'tcx>(
|
||||
|
||||
@ -359,7 +359,7 @@ impl<'tcx> FunctionCx<'_, '_, 'tcx> {
|
||||
where
|
||||
T: TypeFoldable<TyCtxt<'tcx>> + Copy,
|
||||
{
|
||||
self.instance.subst_mir_and_normalize_erasing_regions(
|
||||
self.instance.instantiate_mir_and_normalize_erasing_regions(
|
||||
self.tcx,
|
||||
ty::ParamEnv::reveal_all(),
|
||||
ty::EarlyBinder::bind(value),
|
||||
@ -480,7 +480,7 @@ impl<'tcx> LayoutOfHelpers<'tcx> for RevealAllLayoutCx<'tcx> {
|
||||
if let LayoutError::SizeOverflow(_) | LayoutError::ReferencesError(_) = err {
|
||||
self.0.sess.span_fatal(span, err.to_string())
|
||||
} else {
|
||||
span_bug!(span, "failed to get layout for `{}`: {}", ty, err)
|
||||
self.0.sess.span_fatal(span, format!("failed to get layout for `{}`: {}", ty, err))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2,9 +2,8 @@
|
||||
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
||||
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
|
||||
use rustc_middle::mir::interpret::{
|
||||
read_target_uint, AllocId, ConstAllocation, ConstValue, ErrorHandled, GlobalAlloc, Scalar,
|
||||
};
|
||||
use rustc_middle::mir::interpret::{read_target_uint, AllocId, GlobalAlloc, Scalar};
|
||||
use rustc_middle::mir::ConstValue;
|
||||
|
||||
use cranelift_module::*;
|
||||
|
||||
@ -33,16 +32,6 @@ impl ConstantCx {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn check_constants(fx: &mut FunctionCx<'_, '_, '_>) -> bool {
|
||||
let mut all_constants_ok = true;
|
||||
for constant in &fx.mir.required_consts {
|
||||
if eval_mir_constant(fx, constant).is_none() {
|
||||
all_constants_ok = false;
|
||||
}
|
||||
}
|
||||
all_constants_ok
|
||||
}
|
||||
|
||||
pub(crate) fn codegen_static(tcx: TyCtxt<'_>, module: &mut dyn Module, def_id: DefId) {
|
||||
let mut constants_cx = ConstantCx::new();
|
||||
constants_cx.todo.push(TodoItem::Static(def_id));
|
||||
@ -75,53 +64,21 @@ pub(crate) fn codegen_tls_ref<'tcx>(
|
||||
|
||||
pub(crate) fn eval_mir_constant<'tcx>(
|
||||
fx: &FunctionCx<'_, '_, 'tcx>,
|
||||
constant: &Constant<'tcx>,
|
||||
) -> Option<(ConstValue<'tcx>, Ty<'tcx>)> {
|
||||
let constant_kind = fx.monomorphize(constant.literal);
|
||||
let uv = match constant_kind {
|
||||
ConstantKind::Ty(const_) => match const_.kind() {
|
||||
ty::ConstKind::Unevaluated(uv) => uv.expand(),
|
||||
ty::ConstKind::Value(val) => {
|
||||
return Some((fx.tcx.valtree_to_const_val((const_.ty(), val)), const_.ty()));
|
||||
}
|
||||
err => span_bug!(
|
||||
constant.span,
|
||||
"encountered bad ConstKind after monomorphizing: {:?}",
|
||||
err
|
||||
),
|
||||
},
|
||||
ConstantKind::Unevaluated(mir::UnevaluatedConst { def, .. }, _)
|
||||
if fx.tcx.is_static(def) =>
|
||||
{
|
||||
span_bug!(constant.span, "MIR constant refers to static");
|
||||
}
|
||||
ConstantKind::Unevaluated(uv, _) => uv,
|
||||
ConstantKind::Val(val, _) => return Some((val, constant_kind.ty())),
|
||||
};
|
||||
|
||||
let val = fx
|
||||
.tcx
|
||||
.const_eval_resolve(ty::ParamEnv::reveal_all(), uv, None)
|
||||
.map_err(|err| match err {
|
||||
ErrorHandled::Reported(_) => {
|
||||
fx.tcx.sess.span_err(constant.span, "erroneous constant encountered");
|
||||
}
|
||||
ErrorHandled::TooGeneric => {
|
||||
span_bug!(constant.span, "codegen encountered polymorphic constant: {:?}", err);
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
val.map(|val| (val, constant_kind.ty()))
|
||||
constant: &ConstOperand<'tcx>,
|
||||
) -> (ConstValue<'tcx>, Ty<'tcx>) {
|
||||
let cv = fx.monomorphize(constant.const_);
|
||||
// This cannot fail because we checked all required_consts in advance.
|
||||
let val = cv
|
||||
.eval(fx.tcx, ty::ParamEnv::reveal_all(), Some(constant.span))
|
||||
.expect("erroneous constant not captured by required_consts");
|
||||
(val, cv.ty())
|
||||
}
|
||||
|
||||
pub(crate) fn codegen_constant_operand<'tcx>(
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
constant: &Constant<'tcx>,
|
||||
constant: &ConstOperand<'tcx>,
|
||||
) -> CValue<'tcx> {
|
||||
let (const_val, ty) = eval_mir_constant(fx, constant).unwrap_or_else(|| {
|
||||
span_bug!(constant.span, "erroneous constant not captured by required_consts")
|
||||
});
|
||||
|
||||
let (const_val, ty) = eval_mir_constant(fx, constant);
|
||||
codegen_const_value(fx, const_val, ty)
|
||||
}
|
||||
|
||||
@ -138,7 +95,7 @@ pub(crate) fn codegen_const_value<'tcx>(
|
||||
}
|
||||
|
||||
match const_val {
|
||||
ConstValue::ZeroSized => unreachable!(), // we already handles ZST above
|
||||
ConstValue::ZeroSized => unreachable!(), // we already handled ZST above
|
||||
ConstValue::Scalar(x) => match x {
|
||||
Scalar::Int(int) => {
|
||||
if fx.clif_type(layout.ty).is_some() {
|
||||
@ -222,19 +179,16 @@ pub(crate) fn codegen_const_value<'tcx>(
|
||||
CValue::by_val(val, layout)
|
||||
}
|
||||
},
|
||||
ConstValue::ByRef { alloc, offset } => CValue::by_ref(
|
||||
pointer_for_allocation(fx, alloc)
|
||||
ConstValue::Indirect { alloc_id, offset } => CValue::by_ref(
|
||||
pointer_for_allocation(fx, alloc_id)
|
||||
.offset_i64(fx, i64::try_from(offset.bytes()).unwrap()),
|
||||
layout,
|
||||
),
|
||||
ConstValue::Slice { data, start, end } => {
|
||||
let ptr = pointer_for_allocation(fx, data)
|
||||
.offset_i64(fx, i64::try_from(start).unwrap())
|
||||
.get_addr(fx);
|
||||
let len = fx
|
||||
.bcx
|
||||
.ins()
|
||||
.iconst(fx.pointer_type, i64::try_from(end.checked_sub(start).unwrap()).unwrap());
|
||||
ConstValue::Slice { data, meta } => {
|
||||
let alloc_id = fx.tcx.reserve_and_set_memory_alloc(data);
|
||||
let ptr = pointer_for_allocation(fx, alloc_id).get_addr(fx);
|
||||
// FIXME: the `try_from` here can actually fail, e.g. for very long ZST slices.
|
||||
let len = fx.bcx.ins().iconst(fx.pointer_type, i64::try_from(meta).unwrap());
|
||||
CValue::by_val_pair(ptr, len, layout)
|
||||
}
|
||||
}
|
||||
@ -242,9 +196,9 @@ pub(crate) fn codegen_const_value<'tcx>(
|
||||
|
||||
fn pointer_for_allocation<'tcx>(
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
alloc: ConstAllocation<'tcx>,
|
||||
alloc_id: AllocId,
|
||||
) -> crate::pointer::Pointer {
|
||||
let alloc_id = fx.tcx.create_memory_alloc(alloc);
|
||||
let alloc = fx.tcx.global_alloc(alloc_id).unwrap_memory();
|
||||
let data_id = data_id_for_alloc_id(
|
||||
&mut fx.constants_cx,
|
||||
&mut *fx.module,
|
||||
@ -375,6 +329,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
|
||||
unreachable!()
|
||||
}
|
||||
};
|
||||
// FIXME: should we have a cache so we don't do this multiple times for the same `ConstAllocation`?
|
||||
let data_id = *cx.anon_allocs.entry(alloc_id).or_insert_with(|| {
|
||||
module.declare_anonymous_data(alloc.inner().mutability.is_mut(), false).unwrap()
|
||||
});
|
||||
@ -479,7 +434,7 @@ pub(crate) fn mir_operand_get_const_val<'tcx>(
|
||||
operand: &Operand<'tcx>,
|
||||
) -> Option<ConstValue<'tcx>> {
|
||||
match operand {
|
||||
Operand::Constant(const_) => Some(eval_mir_constant(fx, const_).unwrap().0),
|
||||
Operand::Constant(const_) => Some(eval_mir_constant(fx, const_).0),
|
||||
// FIXME(rust-lang/rust#85105): Casts like `IMM8 as u32` result in the const being stored
|
||||
// inside a temporary before being passed to the intrinsic requiring the const argument.
|
||||
// This code tries to find a single constant defining definition of the referenced local.
|
||||
@ -550,8 +505,8 @@ pub(crate) fn mir_operand_get_const_val<'tcx>(
|
||||
match &bb_data.terminator().kind {
|
||||
TerminatorKind::Goto { .. }
|
||||
| TerminatorKind::SwitchInt { .. }
|
||||
| TerminatorKind::Resume
|
||||
| TerminatorKind::Terminate
|
||||
| TerminatorKind::UnwindResume
|
||||
| TerminatorKind::UnwindTerminate(_)
|
||||
| TerminatorKind::Return
|
||||
| TerminatorKind::Unreachable
|
||||
| TerminatorKind::Drop { .. }
|
||||
|
||||
@ -81,13 +81,10 @@ impl DebugContext {
|
||||
|
||||
match tcx.sess.source_map().lookup_line(span.lo()) {
|
||||
Ok(SourceFileAndLine { sf: file, line }) => {
|
||||
let line_pos = file.lines(|lines| lines[line]);
|
||||
let line_pos = file.lines()[line];
|
||||
let col = file.relative_position(span.lo()) - line_pos;
|
||||
|
||||
(
|
||||
file,
|
||||
u64::try_from(line).unwrap() + 1,
|
||||
u64::from((span.lo() - line_pos).to_u32()) + 1,
|
||||
)
|
||||
(file, u64::try_from(line).unwrap() + 1, u64::from(col.to_u32()) + 1)
|
||||
}
|
||||
Err(file) => (file, 0, 0),
|
||||
}
|
||||
|
||||
@ -269,7 +269,7 @@ fn module_codegen(
|
||||
),
|
||||
) -> OngoingModuleCodegen {
|
||||
let (cgu_name, mut cx, mut module, codegened_functions) =
|
||||
tcx.prof.verbose_generic_activity_with_arg("codegen cgu", cgu_name.as_str()).run(|| {
|
||||
tcx.prof.generic_activity_with_arg("codegen cgu", cgu_name.as_str()).run(|| {
|
||||
let cgu = tcx.codegen_unit(cgu_name);
|
||||
let mono_items = cgu.items_in_deterministic_order(tcx);
|
||||
|
||||
@ -322,35 +322,24 @@ fn module_codegen(
|
||||
});
|
||||
|
||||
OngoingModuleCodegen::Async(std::thread::spawn(move || {
|
||||
cx.profiler.clone().verbose_generic_activity_with_arg("compile functions", &*cgu_name).run(
|
||||
|| {
|
||||
cranelift_codegen::timing::set_thread_profiler(Box::new(super::MeasuremeProfiler(
|
||||
cx.profiler.clone(),
|
||||
)));
|
||||
cx.profiler.clone().generic_activity_with_arg("compile functions", &*cgu_name).run(|| {
|
||||
cranelift_codegen::timing::set_thread_profiler(Box::new(super::MeasuremeProfiler(
|
||||
cx.profiler.clone(),
|
||||
)));
|
||||
|
||||
let mut cached_context = Context::new();
|
||||
for codegened_func in codegened_functions {
|
||||
crate::base::compile_fn(
|
||||
&mut cx,
|
||||
&mut cached_context,
|
||||
&mut module,
|
||||
codegened_func,
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
let mut cached_context = Context::new();
|
||||
for codegened_func in codegened_functions {
|
||||
crate::base::compile_fn(&mut cx, &mut cached_context, &mut module, codegened_func);
|
||||
}
|
||||
});
|
||||
|
||||
let global_asm_object_file = cx
|
||||
.profiler
|
||||
.verbose_generic_activity_with_arg("compile assembly", &*cgu_name)
|
||||
.run(|| {
|
||||
let global_asm_object_file =
|
||||
cx.profiler.generic_activity_with_arg("compile assembly", &*cgu_name).run(|| {
|
||||
crate::global_asm::compile_global_asm(&global_asm_config, &cgu_name, &cx.global_asm)
|
||||
})?;
|
||||
|
||||
let codegen_result = cx
|
||||
.profiler
|
||||
.verbose_generic_activity_with_arg("write object file", &*cgu_name)
|
||||
.run(|| {
|
||||
let codegen_result =
|
||||
cx.profiler.generic_activity_with_arg("write object file", &*cgu_name).run(|| {
|
||||
emit_cgu(
|
||||
&global_asm_config.output_filenames,
|
||||
&cx.profiler,
|
||||
|
||||
@ -242,8 +242,7 @@ pub(crate) fn codegen_inline_asm<'tcx>(
|
||||
}
|
||||
}
|
||||
InlineAsmOperand::Const { ref value } => {
|
||||
let (const_value, ty) = crate::constant::eval_mir_constant(fx, value)
|
||||
.unwrap_or_else(|| span_bug!(span, "asm const cannot be resolved"));
|
||||
let (const_value, ty) = crate::constant::eval_mir_constant(fx, value);
|
||||
let value = rustc_codegen_ssa::common::asm_const_to_str(
|
||||
fx.tcx,
|
||||
span,
|
||||
@ -253,8 +252,8 @@ pub(crate) fn codegen_inline_asm<'tcx>(
|
||||
CInlineAsmOperand::Const { value }
|
||||
}
|
||||
InlineAsmOperand::SymFn { ref value } => {
|
||||
let literal = fx.monomorphize(value.literal);
|
||||
if let ty::FnDef(def_id, args) = *literal.ty().kind() {
|
||||
let const_ = fx.monomorphize(value.const_);
|
||||
if let ty::FnDef(def_id, args) = *const_.ty().kind() {
|
||||
let instance = ty::Instance::resolve_for_fn_ptr(
|
||||
fx.tcx,
|
||||
ty::ParamEnv::reveal_all(),
|
||||
|
||||
@ -177,244 +177,6 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
||||
bool_to_zero_or_max_uint(fx, res_lane_ty, res_lane)
|
||||
});
|
||||
}
|
||||
"llvm.x86.sse2.psrli.d" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.sse2.psrli.d imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 32 => fx.bcx.ins().ushr_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.sse2.psrai.d" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.sse2.psrai.d imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 32 => fx.bcx.ins().sshr_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.sse2.pslli.d" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.sse2.pslli.d imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 32 => fx.bcx.ins().ishl_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.sse2.psrli.w" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.sse2.psrli.d imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 16 => fx.bcx.ins().ushr_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.sse2.psrai.w" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.sse2.psrai.d imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 16 => fx.bcx.ins().sshr_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.sse2.pslli.w" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.sse2.pslli.d imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 16 => fx.bcx.ins().ishl_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.avx.psrli.d" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.avx.psrli.d imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 32 => fx.bcx.ins().ushr_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.avx.psrai.d" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.avx.psrai.d imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 32 => fx.bcx.ins().sshr_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.sse2.psrli.q" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.avx.psrli.q imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 64 => fx.bcx.ins().ushr_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.sse2.pslli.q" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.avx.pslli.q imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 64 => fx.bcx.ins().ishl_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.avx.pslli.d" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.avx.pslli.d imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 32 => fx.bcx.ins().ishl_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.avx2.psrli.w" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.avx.psrli.w imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 16 => fx.bcx.ins().ushr_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.avx2.psrai.w" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.avx.psrai.w imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 16 => fx.bcx.ins().sshr_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.avx2.pslli.w" => {
|
||||
let (a, imm8) = match args {
|
||||
[a, imm8] => (a, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let imm8 = crate::constant::mir_operand_get_const_val(fx, imm8)
|
||||
.expect("llvm.x86.avx.pslli.w imm8 not const");
|
||||
|
||||
simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, lane| match imm8
|
||||
.try_to_bits(Size::from_bytes(4))
|
||||
.unwrap_or_else(|| panic!("imm8 not scalar: {:?}", imm8))
|
||||
{
|
||||
imm8 if imm8 < 16 => fx.bcx.ins().ishl_imm(lane, i64::from(imm8 as u8)),
|
||||
_ => fx.bcx.ins().iconst(types::I32, 0),
|
||||
});
|
||||
}
|
||||
"llvm.x86.ssse3.pshuf.b.128" | "llvm.x86.avx2.pshuf.b" => {
|
||||
let (a, b) = match args {
|
||||
[a, b] => (a, b),
|
||||
@ -506,14 +268,6 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
||||
ret.place_lane(fx, 2).to_ptr().store(fx, res_2, MemFlags::trusted());
|
||||
ret.place_lane(fx, 3).to_ptr().store(fx, res_3, MemFlags::trusted());
|
||||
}
|
||||
"llvm.x86.sse2.storeu.dq" | "llvm.x86.sse2.storeu.pd" => {
|
||||
intrinsic_args!(fx, args => (mem_addr, a); intrinsic);
|
||||
let mem_addr = mem_addr.load_scalar(fx);
|
||||
|
||||
// FIXME correctly handle the unalignment
|
||||
let dest = CPlace::for_ptr(Pointer::new(mem_addr), a.layout());
|
||||
dest.write_cvalue(fx, a);
|
||||
}
|
||||
"llvm.x86.ssse3.pabs.b.128" | "llvm.x86.ssse3.pabs.w.128" | "llvm.x86.ssse3.pabs.d.128" => {
|
||||
let a = match args {
|
||||
[a] => a,
|
||||
@ -571,8 +325,6 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
||||
// llvm.x86.avx2.vperm2i128
|
||||
// llvm.x86.ssse3.pshuf.b.128
|
||||
// llvm.x86.avx2.pshuf.b
|
||||
// llvm.x86.avx2.psrli.w
|
||||
// llvm.x86.sse2.psrli.w
|
||||
|
||||
fn llvm_add_sub<'tcx>(
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
|
||||
@ -21,7 +21,7 @@ fn report_simd_type_validation_error(
|
||||
pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
intrinsic: Symbol,
|
||||
_args: GenericArgsRef<'tcx>,
|
||||
generic_args: GenericArgsRef<'tcx>,
|
||||
args: &[mir::Operand<'tcx>],
|
||||
ret: CPlace<'tcx>,
|
||||
target: BasicBlock,
|
||||
@ -117,6 +117,54 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
||||
});
|
||||
}
|
||||
|
||||
// simd_shuffle_generic<T, U, const I: &[u32]>(x: T, y: T) -> U
sym::simd_shuffle_generic => {
let [x, y] = args else {
bug!("wrong number of args for intrinsic {intrinsic}");
};
let x = codegen_operand(fx, x);
let y = codegen_operand(fx, y);

if !x.layout().ty.is_simd() {
report_simd_type_validation_error(fx, intrinsic, span, x.layout().ty);
return;
}

let idx = generic_args[2]
.expect_const()
.eval(fx.tcx, ty::ParamEnv::reveal_all(), Some(span))
.unwrap()
.unwrap_branch();

assert_eq!(x.layout(), y.layout());
let layout = x.layout();

let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx);

assert_eq!(lane_ty, ret_lane_ty);
assert_eq!(idx.len() as u64, ret_lane_count);

let total_len = lane_count * 2;

let indexes =
idx.iter().map(|idx| idx.unwrap_leaf().try_to_u16().unwrap()).collect::<Vec<u16>>();

for &idx in &indexes {
assert!(u64::from(idx) < total_len, "idx {} out of range 0..{}", idx, total_len);
}

for (out_idx, in_idx) in indexes.into_iter().enumerate() {
let in_lane = if u64::from(in_idx) < lane_count {
x.value_lane(fx, in_idx.into())
} else {
y.value_lane(fx, u64::from(in_idx) - lane_count)
};
let out_lane = ret.place_lane(fx, u64::try_from(out_idx).unwrap());
out_lane.write_cvalue(fx, in_lane);
}
}

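Editorial note (not part of the upstream diff): a minimal, self-contained sketch of the index semantics implemented above, under the assumption that lanes can be modeled as a plain slice of values; all names here are illustrative only.

fn shuffle_generic_lanes(x: &[u32], y: &[u32], idx: &[u16]) -> Vec<u32> {
    // `x` and `y` must have the same lane count; indices in `0..len` pick from `x`,
    // indices in `len..2*len` pick from `y`, mirroring the lane loop in the intrinsic above.
    assert_eq!(x.len(), y.len());
    let total_len = 2 * x.len();
    idx.iter()
        .map(|&i| {
            let i = usize::from(i);
            assert!(i < total_len, "idx {} out of range 0..{}", i, total_len);
            if i < x.len() { x[i] } else { y[i - x.len()] }
        })
        .collect()
}

// e.g. shuffle_generic_lanes(&[1, 2], &[3, 4], &[0, 3]) == vec![1, 4]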
// simd_shuffle<T, I, U>(x: T, y: T, idx: I) -> U
|
||||
sym::simd_shuffle => {
|
||||
let (x, y, idx) = match args {
|
||||
@ -172,7 +220,8 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
||||
.expect("simd_shuffle idx not const");
|
||||
|
||||
let idx_bytes = match idx_const {
|
||||
ConstValue::ByRef { alloc, offset } => {
|
||||
ConstValue::Indirect { alloc_id, offset } => {
|
||||
let alloc = fx.tcx.global_alloc(alloc_id).unwrap_memory();
|
||||
let size = Size::from_bytes(
|
||||
4 * ret_lane_count, /* size_of([u32; ret_lane_count]) */
|
||||
);
|
||||
|
||||
@ -88,7 +88,8 @@ fn unsize_ptr<'tcx>(
|
||||
let src_f = src_layout.field(fx, i);
|
||||
assert_eq!(src_layout.fields.offset(i).bytes(), 0);
|
||||
assert_eq!(dst_layout.fields.offset(i).bytes(), 0);
|
||||
if src_f.is_zst() {
|
||||
if src_f.is_1zst() {
|
||||
// We are looking for the one non-1-ZST field; this is not it.
|
||||
continue;
|
||||
}
|
||||
assert_eq!(src_layout.size, src_f.size);
|
||||
@ -151,6 +152,7 @@ pub(crate) fn coerce_unsized_into<'tcx>(
|
||||
let dst_f = dst.place_field(fx, FieldIdx::new(i));
|
||||
|
||||
if dst_f.layout().is_zst() {
|
||||
// No data here, nothing to copy/coerce.
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
@ -674,7 +674,9 @@ impl<'tcx> CPlace<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn place_opaque_cast(
|
||||
/// Used for `ProjectionElem::Subtype`, `ty` has to be monomorphized before
|
||||
/// passed on.
|
||||
pub(crate) fn place_transmute_type(
|
||||
self,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
ty: Ty<'tcx>,
|
||||
|
||||
@ -48,19 +48,12 @@ pub(crate) fn get_ptr_and_method_ref<'tcx>(
|
||||
) -> (Pointer, Value) {
|
||||
let (ptr, vtable) = 'block: {
|
||||
if let Abi::Scalar(_) = arg.layout().abi {
|
||||
'descend_newtypes: while !arg.layout().ty.is_unsafe_ptr() && !arg.layout().ty.is_ref() {
|
||||
for i in 0..arg.layout().fields.count() {
|
||||
let field = arg.value_field(fx, FieldIdx::new(i));
|
||||
if !field.layout().is_zst() {
|
||||
// we found the one non-zero-sized field that is allowed
|
||||
// now find *its* non-zero-sized field, or stop if it's a
|
||||
// pointer
|
||||
arg = field;
|
||||
continue 'descend_newtypes;
|
||||
}
|
||||
}
|
||||
|
||||
bug!("receiver has no non-zero-sized fields {:?}", arg);
|
||||
while !arg.layout().ty.is_unsafe_ptr() && !arg.layout().ty.is_ref() {
|
||||
let (idx, _) = arg
|
||||
.layout()
|
||||
.non_1zst_field(fx)
|
||||
.expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type");
|
||||
arg = arg.value_field(fx, FieldIdx::new(idx));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -113,7 +113,7 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
match self.ret.mode {
|
||||
PassMode::Ignore => cx.type_void(),
|
||||
PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_gcc_type(cx),
|
||||
PassMode::Cast(ref cast, _) => cast.gcc_type(cx),
|
||||
PassMode::Cast { ref cast, .. } => cast.gcc_type(cx),
|
||||
PassMode::Indirect { .. } => {
|
||||
argument_tys.push(cx.type_ptr_to(self.ret.memory_ty(cx)));
|
||||
cx.type_void()
|
||||
@ -125,25 +125,25 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
PassMode::Ignore => continue,
|
||||
PassMode::Direct(_) => arg.layout.immediate_gcc_type(cx),
|
||||
PassMode::Pair(..) => {
|
||||
argument_tys.push(arg.layout.scalar_pair_element_gcc_type(cx, 0, true));
|
||||
argument_tys.push(arg.layout.scalar_pair_element_gcc_type(cx, 1, true));
|
||||
argument_tys.push(arg.layout.scalar_pair_element_gcc_type(cx, 0));
|
||||
argument_tys.push(arg.layout.scalar_pair_element_gcc_type(cx, 1));
|
||||
continue;
|
||||
}
|
||||
PassMode::Indirect { extra_attrs: Some(_), .. } => {
|
||||
PassMode::Indirect { meta_attrs: Some(_), .. } => {
|
||||
unimplemented!();
|
||||
}
|
||||
PassMode::Cast(ref cast, pad_i32) => {
|
||||
PassMode::Cast { ref cast, pad_i32 } => {
|
||||
// add padding
|
||||
if pad_i32 {
|
||||
argument_tys.push(Reg::i32().gcc_type(cx));
|
||||
}
|
||||
cast.gcc_type(cx)
|
||||
}
|
||||
PassMode::Indirect { extra_attrs: None, on_stack: true, .. } => {
|
||||
PassMode::Indirect { meta_attrs: None, on_stack: true, .. } => {
|
||||
on_stack_param_indices.insert(argument_tys.len());
|
||||
arg.memory_ty(cx)
|
||||
},
|
||||
PassMode::Indirect { extra_attrs: None, on_stack: false, .. } => cx.type_ptr_to(arg.memory_ty(cx)),
|
||||
PassMode::Indirect { meta_attrs: None, on_stack: false, .. } => cx.type_ptr_to(arg.memory_ty(cx)),
|
||||
};
|
||||
argument_tys.push(arg_ty);
|
||||
}
|
||||
|
||||
@ -821,7 +821,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
|
||||
let mut load = |i, scalar: &abi::Scalar, align| {
|
||||
let llptr = self.struct_gep(pair_type, place.llval, i as u64);
|
||||
let llty = place.layout.scalar_pair_element_gcc_type(self, i, false);
|
||||
let llty = place.layout.scalar_pair_element_gcc_type(self, i);
|
||||
let load = self.load(llty, llptr, align);
|
||||
scalar_load_metadata(self, load, scalar);
|
||||
if scalar.is_bool() { self.trunc(load, self.type_i1()) } else { load }
|
||||
|
||||
@ -100,7 +100,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)
|
||||
// whether we are sharing generics or not. The important thing here is
|
||||
// that the visibility we apply to the declaration is the same one that
|
||||
// has been applied to the definition (wherever that definition may be).
|
||||
let is_generic = instance.args.non_erasable_generics().next().is_some();
|
||||
let is_generic = instance.args.non_erasable_generics(tcx, instance.def_id()).next().is_some();
|
||||
|
||||
if is_generic {
|
||||
// This is a monomorphization. Its expected visibility depends
|
||||
|
||||
@ -7,6 +7,7 @@ use rustc_codegen_ssa::traits::{
|
||||
BaseTypeMethods,
|
||||
MiscMethods,
|
||||
};
|
||||
use rustc_codegen_ssa::errors as ssa_errors;
|
||||
use rustc_data_structures::base_n;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
||||
use rustc_middle::span_bug;
|
||||
@ -479,7 +480,7 @@ impl<'gcc, 'tcx> LayoutOfHelpers<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
if let LayoutError::SizeOverflow(_) | LayoutError::ReferencesError(_) = err {
|
||||
self.sess().emit_fatal(respan(span, err.into_diagnostic()))
|
||||
} else {
|
||||
span_bug!(span, "failed to get layout for `{}`: {}", ty, err)
|
||||
self.tcx.sess.emit_fatal(ssa_errors::FailedToGetLayout { span, ty, err })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -55,7 +55,7 @@ impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
_fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
|
||||
_llfn: RValue<'gcc>,
|
||||
_mir: &mir::Body<'tcx>,
|
||||
) -> Option<FunctionDebugContext<Self::DIScope, Self::DILocation>> {
|
||||
) -> Option<FunctionDebugContext<'tcx, Self::DIScope, Self::DILocation>> {
|
||||
// TODO(antoyo)
|
||||
None
|
||||
}
|
||||
|
||||
@ -144,7 +144,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
sym::volatile_load | sym::unaligned_volatile_load => {
|
||||
let tp_ty = fn_args.type_at(0);
|
||||
let mut ptr = args[0].immediate();
|
||||
if let PassMode::Cast(ty, _) = &fn_abi.ret.mode {
|
||||
if let PassMode::Cast { cast: ty, .. } = &fn_abi.ret.mode {
|
||||
ptr = self.pointercast(ptr, self.type_ptr_to(ty.gcc_type(self)));
|
||||
}
|
||||
let load = self.volatile_load(ptr.get_type(), ptr);
|
||||
@ -353,7 +353,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
};
|
||||
|
||||
if !fn_abi.ret.is_ignore() {
|
||||
if let PassMode::Cast(ty, _) = &fn_abi.ret.mode {
|
||||
if let PassMode::Cast { cast: ty, .. } = &fn_abi.ret.mode {
|
||||
let ptr_llty = self.type_ptr_to(ty.gcc_type(self));
|
||||
let ptr = self.pointercast(result.llval, ptr_llty);
|
||||
self.store(llval, ptr, result.align);
|
||||
@ -449,7 +449,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
else if self.is_unsized_indirect() {
|
||||
bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
|
||||
}
|
||||
else if let PassMode::Cast(ref cast, _) = self.mode {
|
||||
else if let PassMode::Cast { ref cast, .. } = self.mode {
|
||||
// FIXME(eddyb): Figure out when the simpler Store is safe, clang
|
||||
// uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}.
|
||||
let can_store_through_cast_ptr = false;
|
||||
@ -511,10 +511,10 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
PassMode::Pair(..) => {
|
||||
OperandValue::Pair(next(), next()).store(bx, dst);
|
||||
},
|
||||
PassMode::Indirect { extra_attrs: Some(_), .. } => {
|
||||
PassMode::Indirect { meta_attrs: Some(_), .. } => {
|
||||
OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
|
||||
},
|
||||
PassMode::Direct(_) | PassMode::Indirect { extra_attrs: None, .. } | PassMode::Cast(..) => {
|
||||
PassMode::Direct(_) | PassMode::Indirect { meta_attrs: None, .. } | PassMode::Cast { .. } => {
|
||||
let next_arg = next();
|
||||
self.store(bx, next_arg, dst);
|
||||
},
|
||||
|
||||
@ -80,7 +80,7 @@ use rustc_errors::{DiagnosticMessage, ErrorGuaranteed, Handler, SubdiagnosticMes
|
||||
use rustc_fluent_macro::fluent_messages;
|
||||
use rustc_metadata::EncodedMetadata;
|
||||
use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
|
||||
use rustc_middle::query::Providers;
|
||||
use rustc_middle::util::Providers;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_session::config::{Lto, OptLevel, OutputFilenames};
|
||||
use rustc_session::Session;
|
||||
|
||||
@ -4,7 +4,7 @@ use gccjit::{Struct, Type};
|
||||
use crate::rustc_codegen_ssa::traits::{BaseTypeMethods, DerivedTypeMethods, LayoutTypeMethods};
|
||||
use rustc_middle::bug;
|
||||
use rustc_middle::ty::{self, Ty, TypeVisitableExt};
|
||||
use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
|
||||
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
|
||||
use rustc_middle::ty::print::with_no_trimmed_paths;
|
||||
use rustc_target::abi::{self, Abi, Align, F32, F64, FieldsShape, Int, Integer, Pointer, PointeeInfo, Size, TyAbiInterface, Variants};
|
||||
use rustc_target::abi::call::{CastTarget, FnAbi, Reg};
|
||||
@ -74,8 +74,8 @@ fn uncached_gcc_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout
|
||||
Abi::ScalarPair(..) => {
|
||||
return cx.type_struct(
|
||||
&[
|
||||
layout.scalar_pair_element_gcc_type(cx, 0, false),
|
||||
layout.scalar_pair_element_gcc_type(cx, 1, false),
|
||||
layout.scalar_pair_element_gcc_type(cx, 0),
|
||||
layout.scalar_pair_element_gcc_type(cx, 1),
|
||||
],
|
||||
false,
|
||||
);
|
||||
@ -150,7 +150,7 @@ pub trait LayoutGccExt<'tcx> {
|
||||
fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>;
|
||||
fn immediate_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>;
|
||||
fn scalar_gcc_type_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, scalar: &abi::Scalar, offset: Size) -> Type<'gcc>;
|
||||
fn scalar_pair_element_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, index: usize, immediate: bool) -> Type<'gcc>;
|
||||
fn scalar_pair_element_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, index: usize) -> Type<'gcc>;
|
||||
fn gcc_field_index(&self, index: usize) -> u64;
|
||||
fn pointee_info_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, offset: Size) -> Option<PointeeInfo>;
|
||||
}
|
||||
@ -182,23 +182,16 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
|
||||
/// of that field's type - this is useful for taking the address of
|
||||
/// that field and ensuring the struct has the right alignment.
|
||||
fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> {
|
||||
// This must produce the same result for `repr(transparent)` wrappers as for the inner type!
|
||||
// In other words, this should generally not look at the type at all, but only at the
|
||||
// layout.
|
||||
if let Abi::Scalar(ref scalar) = self.abi {
|
||||
// Use a different cache for scalars because pointers to DSTs
|
||||
// can be either fat or thin (data pointers of fat pointers).
|
||||
if let Some(&ty) = cx.scalar_types.borrow().get(&self.ty) {
|
||||
return ty;
|
||||
}
|
||||
let ty =
|
||||
match *self.ty.kind() {
|
||||
ty::Ref(_, ty, _) | ty::RawPtr(ty::TypeAndMut { ty, .. }) => {
|
||||
cx.type_ptr_to(cx.layout_of(ty).gcc_type(cx))
|
||||
}
|
||||
ty::Adt(def, _) if def.is_box() => {
|
||||
cx.type_ptr_to(cx.layout_of(self.ty.boxed_ty()).gcc_type(cx))
|
||||
}
|
||||
ty::FnPtr(sig) => cx.fn_ptr_backend_type(&cx.fn_abi_of_fn_ptr(sig, ty::List::empty())),
|
||||
_ => self.scalar_gcc_type_at(cx, scalar, Size::ZERO),
|
||||
};
|
||||
let ty = self.scalar_gcc_type_at(cx, scalar, Size::ZERO);
|
||||
cx.scalar_types.borrow_mut().insert(self.ty, ty);
|
||||
return ty;
|
||||
}
|
||||
@ -272,23 +265,10 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn scalar_pair_element_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, index: usize, immediate: bool) -> Type<'gcc> {
|
||||
// TODO(antoyo): remove llvm hack:
|
||||
// HACK(eddyb) special-case fat pointers until LLVM removes
|
||||
// pointee types, to avoid bitcasting every `OperandRef::deref`.
|
||||
match self.ty.kind() {
|
||||
ty::Ref(..) | ty::RawPtr(_) => {
|
||||
return self.field(cx, index).gcc_type(cx);
|
||||
}
|
||||
// only wide pointer boxes are handled as pointers
|
||||
// thin pointer boxes with scalar allocators are handled by the general logic below
|
||||
ty::Adt(def, args) if def.is_box() && cx.layout_of(args.type_at(1)).is_zst() => {
|
||||
let ptr_ty = Ty::new_mut_ptr(cx.tcx,self.ty.boxed_ty());
|
||||
return cx.layout_of(ptr_ty).scalar_pair_element_gcc_type(cx, index, immediate);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
fn scalar_pair_element_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, index: usize) -> Type<'gcc> {
|
||||
// This must produce the same result for `repr(transparent)` wrappers as for the inner type!
|
||||
// In other words, this should generally not look at the type at all, but only at the
|
||||
// layout.
|
||||
let (a, b) = match self.abi {
|
||||
Abi::ScalarPair(ref a, ref b) => (a, b),
|
||||
_ => bug!("TyAndLayout::scalar_pair_element_llty({:?}): not applicable", self),
|
||||
@ -367,8 +347,8 @@ impl<'gcc, 'tcx> LayoutTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
layout.gcc_field_index(index)
|
||||
}
|
||||
|
||||
fn scalar_pair_element_backend_type(&self, layout: TyAndLayout<'tcx>, index: usize, immediate: bool) -> Type<'gcc> {
|
||||
layout.scalar_pair_element_gcc_type(self, index, immediate)
|
||||
fn scalar_pair_element_backend_type(&self, layout: TyAndLayout<'tcx>, index: usize, _immediate: bool) -> Type<'gcc> {
|
||||
layout.scalar_pair_element_gcc_type(self, index)
|
||||
}
|
||||
|
||||
fn cast_backend_type(&self, ty: &CastTarget) -> Type<'gcc> {
|
||||
|
||||
@ -37,6 +37,8 @@ codegen_llvm_lto_disallowed = lto can only be run for executables, cdylibs and s
|
||||
|
||||
codegen_llvm_lto_dylib = lto cannot be used for `dylib` crate type without `-Zdylib-lto`
|
||||
|
||||
codegen_llvm_lto_proc_macro = lto cannot be used for `proc-macro` crate type without `-Zdylib-lto`
|
||||
|
||||
codegen_llvm_missing_features =
|
||||
add the missing features in a `target_feature` attribute
|
||||
|
||||
@ -83,6 +85,8 @@ codegen_llvm_unknown_ctarget_feature_prefix =
|
||||
unknown feature specified for `-Ctarget-feature`: `{$feature}`
|
||||
.note = features must begin with a `+` to enable or `-` to disable it
|
||||
|
||||
codegen_llvm_unknown_debuginfo_compression = unknown debuginfo compression algorithm {$algorithm} - will fall back to uncompressed debuginfo
|
||||
|
||||
codegen_llvm_write_bytecode = failed to write bytecode to {$path}: {$err}
|
||||
|
||||
codegen_llvm_write_ir = failed to write LLVM IR to {$path}
|
||||
|
||||
@ -211,7 +211,7 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst)
|
||||
} else if self.is_unsized_indirect() {
|
||||
bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
|
||||
} else if let PassMode::Cast(cast, _) = &self.mode {
|
||||
} else if let PassMode::Cast { cast, pad_i32: _ } = &self.mode {
|
||||
// FIXME(eddyb): Figure out when the simpler Store is safe, clang
|
||||
// uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}.
|
||||
let can_store_through_cast_ptr = false;
|
||||
@ -274,12 +274,12 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
PassMode::Pair(..) => {
|
||||
OperandValue::Pair(next(), next()).store(bx, dst);
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
|
||||
OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
|
||||
}
|
||||
PassMode::Direct(_)
|
||||
| PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ }
|
||||
| PassMode::Cast(..) => {
|
||||
| PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ }
|
||||
| PassMode::Cast { .. } => {
|
||||
let next_arg = next();
|
||||
self.store(bx, next_arg, dst);
|
||||
}
|
||||
@ -332,7 +332,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
let llreturn_ty = match &self.ret.mode {
|
||||
PassMode::Ignore => cx.type_void(),
|
||||
PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_llvm_type(cx),
|
||||
PassMode::Cast(cast, _) => cast.llvm_type(cx),
|
||||
PassMode::Cast { cast, pad_i32: _ } => cast.llvm_type(cx),
|
||||
PassMode::Indirect { .. } => {
|
||||
llargument_tys.push(cx.type_ptr());
|
||||
cx.type_void()
|
||||
@ -340,29 +340,78 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
};
|
||||
|
||||
for arg in args {
|
||||
// Note that the exact number of arguments pushed here is carefully synchronized with
|
||||
// code all over the place, both in the codegen_llvm and codegen_ssa crates. That's how
|
||||
// other code then knows which LLVM argument(s) correspond to the n-th Rust argument.
|
||||
let llarg_ty = match &arg.mode {
|
||||
PassMode::Ignore => continue,
|
||||
PassMode::Direct(_) => arg.layout.immediate_llvm_type(cx),
|
||||
PassMode::Direct(_) => {
|
||||
// ABI-compatible Rust types have the same `layout.abi` (up to validity ranges),
// and for Scalar ABIs the LLVM type is fully determined by `layout.abi`,
// guaranteeing that we generate ABI-compatible LLVM IR. Things get tricky for
// aggregates...
if matches!(arg.layout.abi, abi::Abi::Aggregate { .. }) {
|
||||
assert!(
|
||||
arg.layout.is_sized(),
|
||||
"`PassMode::Direct` for unsized type: {}",
|
||||
arg.layout.ty
|
||||
);
|
||||
// This really shouldn't happen, since `immediate_llvm_type` will use
|
||||
// `layout.fields` to turn this Rust type into an LLVM type. This means all
|
||||
// sorts of Rust type details leak into the ABI. However wasm sadly *does*
|
||||
// currently use this mode so we have to allow it -- but we absolutely
|
||||
// shouldn't let any more targets do that.
|
||||
// (Also see <https://github.com/rust-lang/rust/issues/115666>.)
|
||||
assert!(
|
||||
matches!(&*cx.tcx.sess.target.arch, "wasm32" | "wasm64"),
|
||||
"`PassMode::Direct` for aggregates only allowed on wasm targets\nProblematic type: {:#?}",
|
||||
arg.layout,
|
||||
);
|
||||
}
|
||||
arg.layout.immediate_llvm_type(cx)
|
||||
}
|
||||
PassMode::Pair(..) => {
|
||||
// ABI-compatible Rust types have the same `layout.abi` (up to validity ranges),
|
||||
// so for ScalarPair we can easily be sure that we are generating ABI-compatible
|
||||
// LLVM IR.
|
||||
assert!(
|
||||
matches!(arg.layout.abi, abi::Abi::ScalarPair(..)),
|
||||
"PassMode::Pair for type {}",
|
||||
arg.layout.ty
|
||||
);
|
||||
llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 0, true));
|
||||
llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 1, true));
|
||||
continue;
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack } => {
|
||||
// `Indirect` with metadata is only for unsized types, and doesn't work with
|
||||
// on-stack passing.
|
||||
assert!(arg.layout.is_unsized() && !on_stack);
|
||||
// Construct the type of a (wide) pointer to `ty`, and pass its two fields.
|
||||
// Any two ABI-compatible unsized types have the same metadata type and
|
||||
// moreover the same metadata value leads to the same dynamic size and
|
||||
// alignment, so this respects ABI compatibility.
|
||||
let ptr_ty = Ty::new_mut_ptr(cx.tcx, arg.layout.ty);
|
||||
let ptr_layout = cx.layout_of(ptr_ty);
|
||||
llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 0, true));
|
||||
llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 1, true));
|
||||
continue;
|
||||
}
|
||||
PassMode::Cast(cast, pad_i32) => {
|
||||
PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
|
||||
assert!(arg.layout.is_sized());
|
||||
cx.type_ptr()
|
||||
}
|
||||
PassMode::Cast { cast, pad_i32 } => {
|
||||
// `Cast` means "transmute to `CastType`"; that only makes sense for sized types.
|
||||
assert!(arg.layout.is_sized());
|
||||
// add padding
|
||||
if *pad_i32 {
|
||||
llargument_tys.push(Reg::i32().llvm_type(cx));
|
||||
}
|
||||
// Compute the LLVM type we use for this function from the cast type.
|
||||
// We assume here that ABI-compatible Rust types have the same cast type.
|
||||
cast.llvm_type(cx)
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => cx.type_ptr(),
|
||||
};
|
||||
llargument_tys.push(llarg_ty);
|
||||
}
|
||||
@ -405,13 +454,13 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
PassMode::Direct(attrs) => {
|
||||
attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn);
|
||||
}
|
||||
PassMode::Indirect { attrs, extra_attrs: _, on_stack } => {
|
||||
PassMode::Indirect { attrs, meta_attrs: _, on_stack } => {
|
||||
assert!(!on_stack);
|
||||
let i = apply(attrs);
|
||||
let sret = llvm::CreateStructRetAttr(cx.llcx, self.ret.layout.llvm_type(cx));
|
||||
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[sret]);
|
||||
}
|
||||
PassMode::Cast(cast, _) => {
|
||||
PassMode::Cast { cast, pad_i32: _ } => {
|
||||
cast.attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn);
|
||||
}
|
||||
_ => {}
|
||||
@ -419,25 +468,25 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
for arg in self.args.iter() {
|
||||
match &arg.mode {
|
||||
PassMode::Ignore => {}
|
||||
PassMode::Indirect { attrs, extra_attrs: None, on_stack: true } => {
|
||||
PassMode::Indirect { attrs, meta_attrs: None, on_stack: true } => {
|
||||
let i = apply(attrs);
|
||||
let byval = llvm::CreateByValAttr(cx.llcx, arg.layout.llvm_type(cx));
|
||||
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[byval]);
|
||||
}
|
||||
PassMode::Direct(attrs)
|
||||
| PassMode::Indirect { attrs, extra_attrs: None, on_stack: false } => {
|
||||
| PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => {
|
||||
apply(attrs);
|
||||
}
|
||||
PassMode::Indirect { attrs, extra_attrs: Some(extra_attrs), on_stack } => {
|
||||
PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => {
|
||||
assert!(!on_stack);
|
||||
apply(attrs);
|
||||
apply(extra_attrs);
|
||||
apply(meta_attrs);
|
||||
}
|
||||
PassMode::Pair(a, b) => {
|
||||
apply(a);
|
||||
apply(b);
|
||||
}
|
||||
PassMode::Cast(cast, pad_i32) => {
|
||||
PassMode::Cast { cast, pad_i32 } => {
|
||||
if *pad_i32 {
|
||||
apply(&ArgAttributes::new());
|
||||
}
|
||||
@ -467,13 +516,13 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
PassMode::Direct(attrs) => {
|
||||
attrs.apply_attrs_to_callsite(llvm::AttributePlace::ReturnValue, bx.cx, callsite);
|
||||
}
|
||||
PassMode::Indirect { attrs, extra_attrs: _, on_stack } => {
|
||||
PassMode::Indirect { attrs, meta_attrs: _, on_stack } => {
|
||||
assert!(!on_stack);
|
||||
let i = apply(bx.cx, attrs);
|
||||
let sret = llvm::CreateStructRetAttr(bx.cx.llcx, self.ret.layout.llvm_type(bx));
|
||||
attributes::apply_to_callsite(callsite, llvm::AttributePlace::Argument(i), &[sret]);
|
||||
}
|
||||
PassMode::Cast(cast, _) => {
|
||||
PassMode::Cast { cast, pad_i32: _ } => {
|
||||
cast.attrs.apply_attrs_to_callsite(
|
||||
llvm::AttributePlace::ReturnValue,
|
||||
&bx.cx,
|
||||
@ -495,7 +544,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
for arg in self.args.iter() {
|
||||
match &arg.mode {
|
||||
PassMode::Ignore => {}
|
||||
PassMode::Indirect { attrs, extra_attrs: None, on_stack: true } => {
|
||||
PassMode::Indirect { attrs, meta_attrs: None, on_stack: true } => {
|
||||
let i = apply(bx.cx, attrs);
|
||||
let byval = llvm::CreateByValAttr(bx.cx.llcx, arg.layout.llvm_type(bx));
|
||||
attributes::apply_to_callsite(
|
||||
@ -505,18 +554,18 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
);
|
||||
}
|
||||
PassMode::Direct(attrs)
|
||||
| PassMode::Indirect { attrs, extra_attrs: None, on_stack: false } => {
|
||||
| PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => {
|
||||
apply(bx.cx, attrs);
|
||||
}
|
||||
PassMode::Indirect { attrs, extra_attrs: Some(extra_attrs), on_stack: _ } => {
|
||||
PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack: _ } => {
|
||||
apply(bx.cx, attrs);
|
||||
apply(bx.cx, extra_attrs);
|
||||
apply(bx.cx, meta_attrs);
|
||||
}
|
||||
PassMode::Pair(a, b) => {
|
||||
apply(bx.cx, a);
|
||||
apply(bx.cx, b);
|
||||
}
|
||||
PassMode::Cast(cast, pad_i32) => {
|
||||
PassMode::Cast { cast, pad_i32 } => {
|
||||
if *pad_i32 {
|
||||
apply(bx.cx, &ArgAttributes::new());
|
||||
}
|
||||
@ -571,7 +620,9 @@ impl From<Conv> for llvm::CallConv {
|
||||
Conv::C | Conv::Rust | Conv::CCmseNonSecureCall | Conv::RiscvInterrupt { .. } => {
|
||||
llvm::CCallConv
|
||||
}
|
||||
Conv::RustCold => llvm::ColdCallConv,
|
||||
Conv::Cold => llvm::ColdCallConv,
|
||||
Conv::PreserveMost => llvm::PreserveMost,
|
||||
Conv::PreserveAll => llvm::PreserveAll,
|
||||
Conv::AmdGpuKernel => llvm::AmdGpuKernel,
|
||||
Conv::AvrInterrupt => llvm::AvrInterrupt,
|
||||
Conv::AvrNonBlockingInterrupt => llvm::AvrNonBlockingInterrupt,
|
||||
|
||||
@ -367,7 +367,7 @@ impl<'a> LlvmArchiveBuilder<'a> {
|
||||
match addition {
|
||||
Addition::File { path, name_in_archive } => {
|
||||
let path = CString::new(path.to_str().unwrap())?;
|
||||
let name = CString::new(name_in_archive.clone())?;
|
||||
let name = CString::new(name_in_archive.as_bytes())?;
|
||||
members.push(llvm::LLVMRustArchiveMemberNew(
|
||||
path.as_ptr(),
|
||||
name.as_ptr(),
|
||||
|
||||
@ -1,6 +1,8 @@
|
||||
use crate::back::write::{self, save_temp_bitcode, CodegenDiagnosticsStage, DiagnosticHandlers};
|
||||
use crate::back::write::{
|
||||
self, bitcode_section_name, save_temp_bitcode, CodegenDiagnosticsStage, DiagnosticHandlers,
|
||||
};
|
||||
use crate::errors::{
|
||||
DynamicLinkingWithLTO, LlvmError, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib,
|
||||
DynamicLinkingWithLTO, LlvmError, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib, LtoProcMacro,
|
||||
};
|
||||
use crate::llvm::{self, build_string};
|
||||
use crate::{LlvmCodegenBackend, ModuleLlvm};
|
||||
@ -24,6 +26,7 @@ use std::ffi::{CStr, CString};
|
||||
use std::fs::File;
|
||||
use std::io;
|
||||
use std::iter;
|
||||
use std::mem::ManuallyDrop;
|
||||
use std::path::Path;
|
||||
use std::slice;
|
||||
use std::sync::Arc;
|
||||
@ -34,8 +37,12 @@ pub const THIN_LTO_KEYS_INCR_COMP_FILE_NAME: &str = "thin-lto-past-keys.bin";
|
||||
|
||||
pub fn crate_type_allows_lto(crate_type: CrateType) -> bool {
|
||||
match crate_type {
|
||||
CrateType::Executable | CrateType::Dylib | CrateType::Staticlib | CrateType::Cdylib => true,
|
||||
CrateType::Rlib | CrateType::ProcMacro => false,
|
||||
CrateType::Executable
|
||||
| CrateType::Dylib
|
||||
| CrateType::Staticlib
|
||||
| CrateType::Cdylib
|
||||
| CrateType::ProcMacro => true,
|
||||
CrateType::Rlib => false,
|
||||
}
|
||||
}
|
||||
|
||||
@ -85,6 +92,11 @@ fn prepare_lto(
|
||||
diag_handler.emit_err(LtoDylib);
|
||||
return Err(FatalError);
|
||||
}
|
||||
} else if *crate_type == CrateType::ProcMacro {
|
||||
if !cgcx.opts.unstable_opts.dylib_lto {
|
||||
diag_handler.emit_err(LtoProcMacro);
|
||||
return Err(FatalError);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -120,6 +132,7 @@ fn prepare_lto(
|
||||
info!("adding bitcode from {}", name);
|
||||
match get_bitcode_slice_from_object_data(
|
||||
child.data(&*archive_data).expect("corrupt rlib"),
|
||||
cgcx,
|
||||
) {
|
||||
Ok(data) => {
|
||||
let module = SerializedModule::FromRlib(data.to_vec());
|
||||
@ -141,10 +154,29 @@ fn prepare_lto(
|
||||
Ok((symbols_below_threshold, upstream_modules))
|
||||
}
|
||||
|
||||
fn get_bitcode_slice_from_object_data(obj: &[u8]) -> Result<&[u8], LtoBitcodeFromRlib> {
|
||||
fn get_bitcode_slice_from_object_data<'a>(
|
||||
obj: &'a [u8],
|
||||
cgcx: &CodegenContext<LlvmCodegenBackend>,
|
||||
) -> Result<&'a [u8], LtoBitcodeFromRlib> {
|
||||
// We're about to assume the data here is an object file with sections, but if it's raw LLVM IR that
|
||||
// won't work. Fortunately, if that's what we have we can just return the object directly, so we sniff
|
||||
// the relevant magic strings here and return.
|
||||
if obj.starts_with(b"\xDE\xC0\x17\x0B") || obj.starts_with(b"BC\xC0\xDE") {
|
||||
return Ok(obj);
|
||||
}
|
||||
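// Editorial note (not part of the upstream diff): for readers unfamiliar with the magic
// values checked above, `BC\xC0\xDE` is the raw LLVM bitcode magic, and `\xDE\xC0\x17\x0B`
// is the little-endian encoding of 0x0B17C0DE, the bitcode wrapper header magic.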
// We drop the "__LLVM," prefix here because on Apple platforms there's a notion of "segment name"
|
||||
// which in the public API for sections gets treated as part of the section name, but internally
|
||||
// in MachOObjectFile.cpp gets treated separately.
|
||||
let section_name = bitcode_section_name(cgcx).trim_start_matches("__LLVM,");
|
||||
let mut len = 0;
|
||||
let data =
|
||||
unsafe { llvm::LLVMRustGetBitcodeSliceFromObjectData(obj.as_ptr(), obj.len(), &mut len) };
|
||||
let data = unsafe {
|
||||
llvm::LLVMRustGetSliceFromObjectDataByName(
|
||||
obj.as_ptr(),
|
||||
obj.len(),
|
||||
section_name.as_ptr(),
|
||||
&mut len,
|
||||
)
|
||||
};
|
||||
if !data.is_null() {
|
||||
assert!(len != 0);
|
||||
let bc = unsafe { slice::from_raw_parts(data, len) };
|
||||
@ -441,7 +473,7 @@ fn thin_lto(
|
||||
|
||||
for (i, (name, buffer)) in modules.into_iter().enumerate() {
|
||||
info!("local module: {} - {}", i, name);
|
||||
let cname = CString::new(name.clone()).unwrap();
|
||||
let cname = CString::new(name.as_bytes()).unwrap();
|
||||
thin_modules.push(llvm::ThinLTOModule {
|
||||
identifier: cname.as_ptr(),
|
||||
data: buffer.data().as_ptr(),
|
||||
@ -583,7 +615,7 @@ pub(crate) fn run_pass_manager(
|
||||
module: &mut ModuleCodegen<ModuleLlvm>,
|
||||
thin: bool,
|
||||
) -> Result<(), FatalError> {
|
||||
let _timer = cgcx.prof.verbose_generic_activity_with_arg("LLVM_lto_optimize", &*module.name);
|
||||
let _timer = cgcx.prof.generic_activity_with_arg("LLVM_lto_optimize", &*module.name);
|
||||
let config = cgcx.config(module.kind);
|
||||
|
||||
// Now we have one massive module inside of llmod. Time to run the
|
||||
@ -705,7 +737,7 @@ pub unsafe fn optimize_thin_module(
|
||||
let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names);
|
||||
let llmod_raw = parse_module(llcx, module_name, thin_module.data(), &diag_handler)? as *const _;
|
||||
let mut module = ModuleCodegen {
|
||||
module_llvm: ModuleLlvm { llmod_raw, llcx, tm },
|
||||
module_llvm: ModuleLlvm { llmod_raw, llcx, tm: ManuallyDrop::new(tm) },
|
||||
name: thin_module.name().to_string(),
|
||||
kind: ModuleKind::Regular,
|
||||
};
|
||||
|
||||
103
compiler/rustc_codegen_llvm/src/back/owned_target_machine.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
use std::{
|
||||
ffi::{c_char, CStr},
|
||||
marker::PhantomData,
|
||||
ops::Deref,
|
||||
ptr::NonNull,
|
||||
};
|
||||
|
||||
use rustc_data_structures::small_c_str::SmallCStr;
|
||||
|
||||
use crate::{errors::LlvmError, llvm};
|
||||
|
||||
/// Responsible for safely creating and disposing llvm::TargetMachine via ffi functions.
/// Not cloneable as there is no clone function for llvm::TargetMachine.
#[repr(transparent)]
pub struct OwnedTargetMachine {
tm_unique: NonNull<llvm::TargetMachine>,
phantom: PhantomData<llvm::TargetMachine>,
}
|
||||
impl OwnedTargetMachine {
|
||||
pub fn new(
|
||||
triple: &CStr,
|
||||
cpu: &CStr,
|
||||
features: &CStr,
|
||||
abi: &CStr,
|
||||
model: llvm::CodeModel,
|
||||
reloc: llvm::RelocModel,
|
||||
level: llvm::CodeGenOptLevel,
|
||||
use_soft_fp: bool,
|
||||
function_sections: bool,
|
||||
data_sections: bool,
|
||||
unique_section_names: bool,
|
||||
trap_unreachable: bool,
|
||||
singletree: bool,
|
||||
asm_comments: bool,
|
||||
emit_stack_size_section: bool,
|
||||
relax_elf_relocations: bool,
|
||||
use_init_array: bool,
|
||||
split_dwarf_file: &CStr,
|
||||
output_obj_file: &CStr,
|
||||
debug_info_compression: &CStr,
|
||||
force_emulated_tls: bool,
|
||||
args_cstr_buff: &[u8],
|
||||
) -> Result<Self, LlvmError<'static>> {
|
||||
assert!(args_cstr_buff.len() > 0);
|
||||
assert!(
|
||||
*args_cstr_buff.last().unwrap() == 0,
|
||||
"The last character must be a null terminator."
|
||||
);
|
||||
|
||||
// SAFETY: llvm::LLVMRustCreateTargetMachine copies pointed to data
|
||||
let tm_ptr = unsafe {
|
||||
llvm::LLVMRustCreateTargetMachine(
|
||||
triple.as_ptr(),
|
||||
cpu.as_ptr(),
|
||||
features.as_ptr(),
|
||||
abi.as_ptr(),
|
||||
model,
|
||||
reloc,
|
||||
level,
|
||||
use_soft_fp,
|
||||
function_sections,
|
||||
data_sections,
|
||||
unique_section_names,
|
||||
trap_unreachable,
|
||||
singletree,
|
||||
asm_comments,
|
||||
emit_stack_size_section,
|
||||
relax_elf_relocations,
|
||||
use_init_array,
|
||||
split_dwarf_file.as_ptr(),
|
||||
output_obj_file.as_ptr(),
|
||||
debug_info_compression.as_ptr(),
|
||||
force_emulated_tls,
|
||||
args_cstr_buff.as_ptr() as *const c_char,
|
||||
args_cstr_buff.len(),
|
||||
)
|
||||
};
|
||||
|
||||
NonNull::new(tm_ptr)
|
||||
.map(|tm_unique| Self { tm_unique, phantom: PhantomData })
|
||||
.ok_or_else(|| LlvmError::CreateTargetMachine { triple: SmallCStr::from(triple) })
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for OwnedTargetMachine {
|
||||
type Target = llvm::TargetMachine;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
// SAFETY: constructing ensures we have a valid pointer created by llvm::LLVMRustCreateTargetMachine
|
||||
unsafe { self.tm_unique.as_ref() }
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for OwnedTargetMachine {
|
||||
fn drop(&mut self) {
|
||||
// SAFETY: constructing ensures we have a valid pointer created by llvm::LLVMRustCreateTargetMachine
|
||||
// OwnedTargetMachine is not copyable so there is no double free or use after free
|
||||
unsafe {
|
||||
llvm::LLVMRustDisposeTargetMachine(self.tm_unique.as_mut());
|
||||
}
|
||||
}
|
||||
}
|
||||
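// Editorial note (not part of the upstream diff): a hedged usage sketch of the RAII wrapper
// above; the argument values are hypothetical and elided, since the real call sites live in
// back/write.rs (see `target_machine_factory` further down in this diff).
//
//     let tm = OwnedTargetMachine::new(&triple, &cpu, &features, &abi, /* ... */)?;
//     // `Deref` yields `&llvm::TargetMachine` for FFI calls that only need a borrow:
//     unsafe { llvm::LLVMRustSetDataLayoutFromTargetMachine(llmod, &tm) };
//     // Dropping `tm` calls `LLVMRustDisposeTargetMachine` exactly once.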
@ -1,17 +1,22 @@
|
||||
use crate::back::lto::ThinBuffer;
|
||||
use crate::back::owned_target_machine::OwnedTargetMachine;
|
||||
use crate::back::profiling::{
|
||||
selfprofile_after_pass_callback, selfprofile_before_pass_callback, LlvmSelfProfiler,
|
||||
};
|
||||
use crate::base;
|
||||
use crate::common;
|
||||
use crate::errors::{
|
||||
CopyBitcode, FromLlvmDiag, FromLlvmOptimizationDiag, LlvmError, WithLlvmError, WriteBytecode,
|
||||
CopyBitcode, FromLlvmDiag, FromLlvmOptimizationDiag, LlvmError, UnknownCompression,
|
||||
WithLlvmError, WriteBytecode,
|
||||
};
|
||||
use crate::llvm::{self, DiagnosticInfo, PassManager};
|
||||
use crate::llvm_util;
|
||||
use crate::type_::Type;
|
||||
use crate::LlvmCodegenBackend;
|
||||
use crate::ModuleLlvm;
|
||||
use llvm::{
|
||||
LLVMRustLLVMHasZlibCompressionForDebugSymbols, LLVMRustLLVMHasZstdCompressionForDebugSymbols,
|
||||
};
|
||||
use rustc_codegen_ssa::back::link::ensure_removed;
|
||||
use rustc_codegen_ssa::back::write::{
|
||||
BitcodeSection, CodegenContext, EmitObj, ModuleConfig, TargetMachineFactoryConfig,
|
||||
@ -94,8 +99,8 @@ pub fn write_output_file<'ll>(
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_informational_target_machine(sess: &Session) -> &'static mut llvm::TargetMachine {
|
||||
let config = TargetMachineFactoryConfig { split_dwarf_file: None };
|
||||
pub fn create_informational_target_machine(sess: &Session) -> OwnedTargetMachine {
|
||||
let config = TargetMachineFactoryConfig { split_dwarf_file: None, output_obj_file: None };
|
||||
// Can't use query system here quite yet because this function is invoked before the query
|
||||
// system/tcx is set up.
|
||||
let features = llvm_util::global_llvm_features(sess, false);
|
||||
@ -103,7 +108,7 @@ pub fn create_informational_target_machine(sess: &Session) -> &'static mut llvm:
|
||||
.unwrap_or_else(|err| llvm_err(sess.diagnostic(), err).raise())
|
||||
}
|
||||
|
||||
pub fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> &'static mut llvm::TargetMachine {
|
||||
pub fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTargetMachine {
|
||||
let split_dwarf_file = if tcx.sess.target_can_use_split_dwarf() {
|
||||
tcx.output_filenames(()).split_dwarf_path(
|
||||
tcx.sess.split_debuginfo(),
|
||||
@ -113,7 +118,11 @@ pub fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> &'static mut ll
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let config = TargetMachineFactoryConfig { split_dwarf_file };
|
||||
|
||||
let output_obj_file =
|
||||
Some(tcx.output_filenames(()).temp_path(OutputType::Object, Some(mod_name)));
|
||||
let config = TargetMachineFactoryConfig { split_dwarf_file, output_obj_file };
|
||||
|
||||
target_machine_factory(
|
||||
&tcx.sess,
|
||||
tcx.backend_optimization_level(()),
|
||||
@ -216,36 +225,73 @@ pub fn target_machine_factory(
|
||||
|
||||
let force_emulated_tls = sess.target.force_emulated_tls;
|
||||
|
||||
Arc::new(move |config: TargetMachineFactoryConfig| {
|
||||
let split_dwarf_file =
|
||||
path_mapping.map_prefix(config.split_dwarf_file.unwrap_or_default()).0;
|
||||
let split_dwarf_file = CString::new(split_dwarf_file.to_str().unwrap()).unwrap();
|
||||
// Copy the exe path, followed by the expanded command-line args, all into one buffer,
// null-terminating each of them so they can be used as C strings.
let args_cstr_buff = {
|
||||
let mut args_cstr_buff: Vec<u8> = Vec::new();
|
||||
let exe_path = std::env::current_exe().unwrap_or_default();
|
||||
let exe_path_str = exe_path.into_os_string().into_string().unwrap_or_default();
|
||||
|
||||
let tm = unsafe {
|
||||
llvm::LLVMRustCreateTargetMachine(
|
||||
triple.as_ptr(),
|
||||
cpu.as_ptr(),
|
||||
features.as_ptr(),
|
||||
abi.as_ptr(),
|
||||
code_model,
|
||||
reloc_model,
|
||||
opt_level,
|
||||
use_softfp,
|
||||
ffunction_sections,
|
||||
fdata_sections,
|
||||
funique_section_names,
|
||||
trap_unreachable,
|
||||
singlethread,
|
||||
asm_comments,
|
||||
emit_stack_size_section,
|
||||
relax_elf_relocations,
|
||||
use_init_array,
|
||||
split_dwarf_file.as_ptr(),
|
||||
force_emulated_tls,
|
||||
)
|
||||
args_cstr_buff.extend_from_slice(exe_path_str.as_bytes());
|
||||
args_cstr_buff.push(0);
|
||||
|
||||
for arg in sess.expanded_args.iter() {
|
||||
args_cstr_buff.extend_from_slice(arg.as_bytes());
|
||||
args_cstr_buff.push(0);
|
||||
}
|
||||
|
||||
args_cstr_buff
|
||||
};
|
||||
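Editorial note (not part of the upstream diff): a small standalone sketch of the buffer layout built above, assuming only std; the C side can then walk the buffer as a sequence of NUL-terminated strings within a known total length.

fn null_separated(args: &[&str]) -> Vec<u8> {
    // Append each argument followed by a single NUL byte, like `args_cstr_buff` above.
    let mut buf = Vec::new();
    for arg in args {
        buf.extend_from_slice(arg.as_bytes());
        buf.push(0);
    }
    buf
}

// e.g. null_separated(&["rustc", "-O"]) == b"rustc\0-O\0".to_vec()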
|
||||
let debuginfo_compression = sess.opts.debuginfo_compression.to_string();
|
||||
match sess.opts.debuginfo_compression {
|
||||
rustc_session::config::DebugInfoCompression::Zlib => {
|
||||
if !unsafe { LLVMRustLLVMHasZlibCompressionForDebugSymbols() } {
|
||||
sess.emit_warning(UnknownCompression { algorithm: "zlib" });
|
||||
}
|
||||
}
|
||||
rustc_session::config::DebugInfoCompression::Zstd => {
|
||||
if !unsafe { LLVMRustLLVMHasZstdCompressionForDebugSymbols() } {
|
||||
sess.emit_warning(UnknownCompression { algorithm: "zstd" });
|
||||
}
|
||||
}
|
||||
rustc_session::config::DebugInfoCompression::None => {}
|
||||
};
|
||||
let debuginfo_compression = SmallCStr::new(&debuginfo_compression);
|
||||
|
||||
Arc::new(move |config: TargetMachineFactoryConfig| {
|
||||
let path_to_cstring_helper = |path: Option<PathBuf>| -> CString {
|
||||
let path = path_mapping.map_prefix(path.unwrap_or_default()).0;
|
||||
CString::new(path.to_str().unwrap()).unwrap()
|
||||
};
|
||||
|
||||
tm.ok_or_else(|| LlvmError::CreateTargetMachine { triple: triple.clone() })
|
||||
let split_dwarf_file = path_to_cstring_helper(config.split_dwarf_file);
|
||||
let output_obj_file = path_to_cstring_helper(config.output_obj_file);
|
||||
|
||||
OwnedTargetMachine::new(
|
||||
&triple,
|
||||
&cpu,
|
||||
&features,
|
||||
&abi,
|
||||
code_model,
|
||||
reloc_model,
|
||||
opt_level,
|
||||
use_softfp,
|
||||
ffunction_sections,
|
||||
fdata_sections,
|
||||
funique_section_names,
|
||||
trap_unreachable,
|
||||
singlethread,
|
||||
asm_comments,
|
||||
emit_stack_size_section,
|
||||
relax_elf_relocations,
|
||||
use_init_array,
|
||||
&split_dwarf_file,
|
||||
&output_obj_file,
|
||||
&debuginfo_compression,
|
||||
force_emulated_tls,
|
||||
&args_cstr_buff,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
@ -853,6 +899,27 @@ fn create_section_with_flags_asm(section_name: &str, section_flags: &str, data:
|
||||
asm
|
||||
}
|
||||
|
||||
fn target_is_apple(cgcx: &CodegenContext<LlvmCodegenBackend>) -> bool {
|
||||
cgcx.opts.target_triple.triple().contains("-ios")
|
||||
|| cgcx.opts.target_triple.triple().contains("-darwin")
|
||||
|| cgcx.opts.target_triple.triple().contains("-tvos")
|
||||
|| cgcx.opts.target_triple.triple().contains("-watchos")
|
||||
}
|
||||
|
||||
fn target_is_aix(cgcx: &CodegenContext<LlvmCodegenBackend>) -> bool {
|
||||
cgcx.opts.target_triple.triple().contains("-aix")
|
||||
}
|
||||
|
||||
pub(crate) fn bitcode_section_name(cgcx: &CodegenContext<LlvmCodegenBackend>) -> &'static str {
|
||||
if target_is_apple(cgcx) {
|
||||
"__LLVM,__bitcode\0"
|
||||
} else if target_is_aix(cgcx) {
|
||||
".ipa\0"
|
||||
} else {
|
||||
".llvmbc\0"
|
||||
}
|
||||
}
|
||||
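// Editorial note (not part of the upstream diff): these are the same embedded-bitcode section
// names used by `embed_bitcode` below, now shared with the LTO reader; note that
// `get_bitcode_slice_from_object_data` in back/lto.rs strips the "__LLVM," segment prefix
// before looking the section up by name.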
|
||||
/// Embed the bitcode of an LLVM module in the LLVM module itself.
|
||||
///
|
||||
/// This is done primarily for iOS where it appears to be standard to compile C
|
||||
@ -913,11 +980,8 @@ unsafe fn embed_bitcode(
|
||||
// Unfortunately, LLVM provides no way to set custom section flags. For ELF
|
||||
// and COFF we emit the sections using module level inline assembly for that
|
||||
// reason (see issue #90326 for historical background).
|
||||
let is_aix = cgcx.opts.target_triple.triple().contains("-aix");
|
||||
let is_apple = cgcx.opts.target_triple.triple().contains("-ios")
|
||||
|| cgcx.opts.target_triple.triple().contains("-darwin")
|
||||
|| cgcx.opts.target_triple.triple().contains("-tvos")
|
||||
|| cgcx.opts.target_triple.triple().contains("-watchos");
|
||||
let is_aix = target_is_aix(cgcx);
|
||||
let is_apple = target_is_apple(cgcx);
|
||||
if is_apple
|
||||
|| is_aix
|
||||
|| cgcx.opts.target_triple.triple().starts_with("wasm")
|
||||
@ -932,13 +996,7 @@ unsafe fn embed_bitcode(
|
||||
);
|
||||
llvm::LLVMSetInitializer(llglobal, llconst);
|
||||
|
||||
let section = if is_apple {
|
||||
"__LLVM,__bitcode\0"
|
||||
} else if is_aix {
|
||||
".ipa\0"
|
||||
} else {
|
||||
".llvmbc\0"
|
||||
};
|
||||
let section = bitcode_section_name(cgcx);
|
||||
llvm::LLVMSetSection(llglobal, section.as_ptr().cast());
|
||||
llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage);
|
||||
llvm::LLVMSetGlobalConstant(llglobal, llvm::True);
|
||||
|
||||
@ -95,7 +95,8 @@ pub fn get_fn<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, instance: Instance<'tcx>) ->
|
||||
unsafe {
|
||||
llvm::LLVMRustSetLinkage(llfn, llvm::Linkage::ExternalLinkage);
|
||||
|
||||
let is_generic = instance.args.non_erasable_generics().next().is_some();
|
||||
let is_generic =
|
||||
instance.args.non_erasable_generics(tcx, instance.def_id()).next().is_some();
|
||||
|
||||
if is_generic {
|
||||
// This is a monomorphization. Its expected visibility depends
|
||||
|
||||
@ -10,6 +10,7 @@ use crate::value::Value;
|
||||
|
||||
use cstr::cstr;
|
||||
use rustc_codegen_ssa::base::{wants_msvc_seh, wants_wasm_eh};
|
||||
use rustc_codegen_ssa::errors as ssa_errors;
|
||||
use rustc_codegen_ssa::traits::*;
|
||||
use rustc_data_structures::base_n;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
@ -159,9 +160,9 @@ pub unsafe fn create_module<'ll>(
|
||||
|
||||
// Ensure the data-layout values hardcoded remain the defaults.
|
||||
if sess.target.is_builtin {
|
||||
// tm is disposed by its drop impl
|
||||
let tm = crate::back::write::create_informational_target_machine(tcx.sess);
|
||||
llvm::LLVMRustSetDataLayoutFromTargetMachine(llmod, tm);
|
||||
llvm::LLVMRustDisposeTargetMachine(tm);
|
||||
llvm::LLVMRustSetDataLayoutFromTargetMachine(llmod, &tm);
|
||||
|
||||
let llvm_data_layout = llvm::LLVMGetDataLayoutStr(llmod);
|
||||
let llvm_data_layout = str::from_utf8(CStr::from_ptr(llvm_data_layout).to_bytes())
|
||||
@ -1000,7 +1001,7 @@ impl<'tcx> LayoutOfHelpers<'tcx> for CodegenCx<'_, 'tcx> {
|
||||
if let LayoutError::SizeOverflow(_) | LayoutError::ReferencesError(_) = err {
|
||||
self.sess().emit_fatal(Spanned { span, node: err.into_diagnostic() })
|
||||
} else {
|
||||
span_bug!(span, "failed to get layout for `{ty}`: {err:?}")
|
||||
self.tcx.sess.emit_fatal(ssa_errors::FailedToGetLayout { span, ty, err })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
use rustc_middle::mir::coverage::{CounterId, MappedExpressionIndex};
|
||||
use rustc_middle::mir::coverage::{CounterId, ExpressionId, Operand};
|
||||
|
||||
/// Must match the layout of `LLVMRustCounterKind`.
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
@ -30,11 +30,8 @@ pub struct Counter {
|
||||
}
|
||||
|
||||
impl Counter {
|
||||
/// Constructs a new `Counter` of kind `Zero`. For this `CounterKind`, the
|
||||
/// `id` is not used.
|
||||
pub fn zero() -> Self {
|
||||
Self { kind: CounterKind::Zero, id: 0 }
|
||||
}
|
||||
/// A `Counter` of kind `Zero`. For this counter kind, the `id` is not used.
|
||||
pub(crate) const ZERO: Self = Self { kind: CounterKind::Zero, id: 0 };
|
||||
|
||||
/// Constructs a new `Counter` of kind `CounterValueReference`.
|
||||
pub fn counter_value_reference(counter_id: CounterId) -> Self {
|
||||
@ -42,20 +39,16 @@ impl Counter {
|
||||
}
|
||||
|
||||
/// Constructs a new `Counter` of kind `Expression`.
|
||||
pub fn expression(mapped_expression_index: MappedExpressionIndex) -> Self {
|
||||
Self { kind: CounterKind::Expression, id: mapped_expression_index.into() }
|
||||
pub(crate) fn expression(expression_id: ExpressionId) -> Self {
|
||||
Self { kind: CounterKind::Expression, id: expression_id.as_u32() }
|
||||
}
|
||||
|
||||
/// Returns true if the `Counter` kind is `Zero`.
|
||||
pub fn is_zero(&self) -> bool {
|
||||
matches!(self.kind, CounterKind::Zero)
|
||||
}
|
||||
|
||||
/// An explicitly-named function to get the ID value, making it more obvious
|
||||
/// that the stored value is now 0-based.
|
||||
pub fn zero_based_id(&self) -> u32 {
|
||||
debug_assert!(!self.is_zero(), "`id` is undefined for CounterKind::Zero");
|
||||
self.id
|
||||
pub(crate) fn from_operand(operand: Operand) -> Self {
|
||||
match operand {
|
||||
Operand::Zero => Self::ZERO,
|
||||
Operand::Counter(id) => Self::counter_value_reference(id),
|
||||
Operand::Expression(id) => Self::expression(id),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -81,6 +74,11 @@ pub struct CounterExpression {
|
||||
}
|
||||
|
||||
impl CounterExpression {
|
||||
/// The dummy expression `(0 - 0)` has a representation of all zeroes,
|
||||
/// making it marginally more efficient to initialize than `(0 + 0)`.
|
||||
pub(crate) const DUMMY: Self =
|
||||
Self { lhs: Counter::ZERO, kind: ExprKind::Subtract, rhs: Counter::ZERO };
|
||||
|
||||
pub fn new(lhs: Counter, kind: ExprKind, rhs: Counter) -> Self {
|
||||
Self { kind, lhs, rhs }
|
||||
}
|
||||
@ -172,7 +170,7 @@ impl CounterMappingRegion {
|
||||
) -> Self {
|
||||
Self {
|
||||
counter,
|
||||
false_counter: Counter::zero(),
|
||||
false_counter: Counter::ZERO,
|
||||
file_id,
|
||||
expanded_file_id: 0,
|
||||
start_line,
|
||||
@ -220,8 +218,8 @@ impl CounterMappingRegion {
|
||||
end_col: u32,
|
||||
) -> Self {
|
||||
Self {
|
||||
counter: Counter::zero(),
|
||||
false_counter: Counter::zero(),
|
||||
counter: Counter::ZERO,
|
||||
false_counter: Counter::ZERO,
|
||||
file_id,
|
||||
expanded_file_id,
|
||||
start_line,
|
||||
@ -243,8 +241,8 @@ impl CounterMappingRegion {
|
||||
end_col: u32,
|
||||
) -> Self {
|
||||
Self {
|
||||
counter: Counter::zero(),
|
||||
false_counter: Counter::zero(),
|
||||
counter: Counter::ZERO,
|
||||
false_counter: Counter::ZERO,
|
||||
file_id,
|
||||
expanded_file_id: 0,
|
||||
start_line,
|
||||
@ -268,7 +266,7 @@ impl CounterMappingRegion {
|
||||
) -> Self {
|
||||
Self {
|
||||
counter,
|
||||
false_counter: Counter::zero(),
|
||||
false_counter: Counter::ZERO,
|
||||
file_id,
|
||||
expanded_file_id: 0,
|
||||
start_line,
|
||||
|
||||
@@ -1,10 +1,8 @@
use crate::coverageinfo::ffi::{Counter, CounterExpression, ExprKind};

use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::bug;
use rustc_middle::mir::coverage::{
CodeRegion, CounterId, ExpressionId, MappedExpressionIndex, Op, Operand,
};
use rustc_data_structures::fx::FxIndexSet;
use rustc_index::IndexVec;
use rustc_middle::mir::coverage::{CodeRegion, CounterId, ExpressionId, Op, Operand};
use rustc_middle::ty::Instance;
use rustc_middle::ty::TyCtxt;

@@ -128,6 +126,58 @@ impl<'tcx> FunctionCoverage<'tcx> {
self.unreachable_regions.push(region)
}

/// Perform some simplifications to make the final coverage mappings
/// slightly smaller.
///
/// This method mainly exists to preserve the simplifications that were
/// already being performed by the Rust-side expression renumbering, so that
/// the resulting coverage mappings don't get worse.
pub(crate) fn simplify_expressions(&mut self) {
// The set of expressions that either were optimized out entirely, or
// have zero as both of their operands, and will therefore always have
// a value of zero. Other expressions that refer to these as operands
// can have those operands replaced with `Operand::Zero`.
let mut zero_expressions = FxIndexSet::default();

// For each expression, perform simplifications based on lower-numbered
// expressions, and then update the set of always-zero expressions if
// necessary.
// (By construction, expressions can only refer to other expressions
// that have lower IDs, so one simplification pass is sufficient.)
for (id, maybe_expression) in self.expressions.iter_enumerated_mut() {
let Some(expression) = maybe_expression else {
// If an expression is missing, it must have been optimized away,
// so any operand that refers to it can be replaced with zero.
zero_expressions.insert(id);
continue;
};

// If an operand refers to an expression that is always zero, then
// that operand can be replaced with `Operand::Zero`.
let maybe_set_operand_to_zero = |operand: &mut Operand| match &*operand {
Operand::Expression(id) if zero_expressions.contains(id) => {
*operand = Operand::Zero;
}
_ => (),
};
maybe_set_operand_to_zero(&mut expression.lhs);
maybe_set_operand_to_zero(&mut expression.rhs);

// Coverage counter values cannot be negative, so if an expression
// involves subtraction from zero, assume that its RHS must also be zero.
// (Do this after simplifications that could set the LHS to zero.)
if let Expression { lhs: Operand::Zero, op: Op::Subtract, .. } = expression {
expression.rhs = Operand::Zero;
}

// After the above simplifications, if both operands are zero, then
// we know that this expression is always zero too.
if let Expression { lhs: Operand::Zero, rhs: Operand::Zero, .. } = expression {
zero_expressions.insert(id);
}
}
}

/// Return the source hash, generated from the HIR node structure, and used to indicate whether
/// or not the source code structure changed between different compilations.
pub fn source_hash(&self) -> u64 {
@@ -146,8 +196,14 @@ impl<'tcx> FunctionCoverage<'tcx> {
self.instance
);

let counter_expressions = self.counter_expressions();
// Expression IDs are indices into `self.expressions`, and on the LLVM
// side they will be treated as indices into `counter_expressions`, so
// the two vectors should correspond 1:1.
assert_eq!(self.expressions.len(), counter_expressions.len());

let counter_regions = self.counter_regions();
let (counter_expressions, expression_regions) = self.expressions_with_regions();
let expression_regions = self.expression_regions();
let unreachable_regions = self.unreachable_regions();

let counter_regions =
@@ -163,149 +219,53 @@ impl<'tcx> FunctionCoverage<'tcx> {
})
}

fn expressions_with_regions(
&self,
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &CodeRegion)>) {
let mut counter_expressions = Vec::with_capacity(self.expressions.len());
let mut expression_regions = Vec::with_capacity(self.expressions.len());
let mut new_indexes = IndexVec::from_elem_n(None, self.expressions.len());
/// Convert this function's coverage expression data into a form that can be
/// passed through FFI to LLVM.
fn counter_expressions(&self) -> Vec<CounterExpression> {
// We know that LLVM will optimize out any unused expressions before
// producing the final coverage map, so there's no need to do the same
// thing on the Rust side unless we're confident we can do much better.
// (See `CounterExpressionsMinimizer` in `CoverageMappingWriter.cpp`.)

// This closure converts any `Expression` operand (`lhs` or `rhs` of the `Op::Add` or
// `Op::Subtract` operation) into its native `llvm::coverage::Counter::CounterKind` type
// and value.
//
// Expressions will be returned from this function in a sequential vector (array) of
// `CounterExpression`, so the expression IDs must be mapped from their original,
// potentially sparse set of indexes.
//
// An `Expression` as an operand will have already been encountered as an `Expression` with
// operands, so its new_index will already have been generated (as a 1-up index value).
// (If an `Expression` as an operand does not have a corresponding new_index, it was
// probably optimized out, after the expression was injected into the MIR, so it will
// get a `CounterKind::Zero` instead.)
//
// In other words, an `Expression`s at any given index can include other expressions as
// operands, but expression operands can only come from the subset of expressions having
// `expression_index`s lower than the referencing `Expression`. Therefore, it is
// reasonable to look up the new index of an expression operand while the `new_indexes`
// vector is only complete up to the current `ExpressionIndex`.
type NewIndexes = IndexSlice<ExpressionId, Option<MappedExpressionIndex>>;
let id_to_counter = |new_indexes: &NewIndexes, operand: Operand| match operand {
Operand::Zero => Some(Counter::zero()),
Operand::Counter(id) => Some(Counter::counter_value_reference(id)),
Operand::Expression(id) => {
self.expressions
.get(id)
.expect("expression id is out of range")
.as_ref()
// If an expression was optimized out, assume it would have produced a count
// of zero. This ensures that expressions dependent on optimized-out
// expressions are still valid.
.map_or(Some(Counter::zero()), |_| new_indexes[id].map(Counter::expression))
}
};

for (original_index, expression) in
self.expressions.iter_enumerated().filter_map(|(original_index, entry)| {
// Option::map() will return None to filter out missing expressions. This may happen
// if, for example, a MIR-instrumented expression is removed during an optimization.
entry.as_ref().map(|expression| (original_index, expression))
self.expressions
.iter()
.map(|expression| match expression {
None => {
// This expression ID was allocated, but we never saw the
// actual expression, so it must have been optimized out.
// Replace it with a dummy expression, and let LLVM take
// care of omitting it from the expression list.
CounterExpression::DUMMY
}
&Some(Expression { lhs, op, rhs, .. }) => {
// Convert the operands and operator as normal.
CounterExpression::new(
Counter::from_operand(lhs),
match op {
Op::Add => ExprKind::Add,
Op::Subtract => ExprKind::Subtract,
},
Counter::from_operand(rhs),
)
}
})
{
let optional_region = &expression.region;
let Expression { lhs, op, rhs, .. } = *expression;
.collect::<Vec<_>>()
}

if let Some(Some((lhs_counter, mut rhs_counter))) = id_to_counter(&new_indexes, lhs)
.map(|lhs_counter| {
id_to_counter(&new_indexes, rhs).map(|rhs_counter| (lhs_counter, rhs_counter))
})
{
if lhs_counter.is_zero() && op.is_subtract() {
// The left side of a subtraction was probably optimized out. As an example,
// a branch condition might be evaluated as a constant expression, and the
// branch could be removed, dropping unused counters in the process.
//
// Since counters are unsigned, we must assume the result of the expression
// can be no more and no less than zero. An expression known to evaluate to zero
// does not need to be added to the coverage map.
//
// Coverage test `loops_branches.rs` includes multiple variations of branches
// based on constant conditional (literal `true` or `false`), and demonstrates
// that the expected counts are still correct.
debug!(
"Expression subtracts from zero (assume unreachable): \
original_index={:?}, lhs={:?}, op={:?}, rhs={:?}, region={:?}",
original_index, lhs, op, rhs, optional_region,
);
rhs_counter = Counter::zero();
}
debug_assert!(
lhs_counter.is_zero()
// Note: with `as usize` the ID _could_ overflow/wrap if `usize = u16`
|| ((lhs_counter.zero_based_id() as usize)
<= usize::max(self.counters.len(), self.expressions.len())),
"lhs id={} > both counters.len()={} and expressions.len()={}
({:?} {:?} {:?})",
lhs_counter.zero_based_id(),
self.counters.len(),
self.expressions.len(),
lhs_counter,
op,
rhs_counter,
);

debug_assert!(
rhs_counter.is_zero()
// Note: with `as usize` the ID _could_ overflow/wrap if `usize = u16`
|| ((rhs_counter.zero_based_id() as usize)
<= usize::max(self.counters.len(), self.expressions.len())),
"rhs id={} > both counters.len()={} and expressions.len()={}
({:?} {:?} {:?})",
rhs_counter.zero_based_id(),
self.counters.len(),
self.expressions.len(),
lhs_counter,
op,
rhs_counter,
);

// Both operands exist. `Expression` operands exist in `self.expressions` and have
// been assigned a `new_index`.
let mapped_expression_index =
MappedExpressionIndex::from(counter_expressions.len());
let expression = CounterExpression::new(
lhs_counter,
match op {
Op::Add => ExprKind::Add,
Op::Subtract => ExprKind::Subtract,
},
rhs_counter,
);
debug!(
"Adding expression {:?} = {:?}, region: {:?}",
mapped_expression_index, expression, optional_region
);
counter_expressions.push(expression);
new_indexes[original_index] = Some(mapped_expression_index);
if let Some(region) = optional_region {
expression_regions.push((Counter::expression(mapped_expression_index), region));
}
} else {
bug!(
"expression has one or more missing operands \
original_index={:?}, lhs={:?}, op={:?}, rhs={:?}, region={:?}",
original_index,
lhs,
op,
rhs,
optional_region,
);
}
}
(counter_expressions, expression_regions.into_iter())
fn expression_regions(&self) -> Vec<(Counter, &CodeRegion)> {
// Find all of the expression IDs that weren't optimized out AND have
// an attached code region, and return the corresponding mapping as a
// counter/region pair.
self.expressions
.iter_enumerated()
.filter_map(|(id, expression)| {
let code_region = expression.as_ref()?.region.as_ref()?;
Some((Counter::expression(id), code_region))
})
.collect::<Vec<_>>()
}

fn unreachable_regions(&self) -> impl Iterator<Item = (Counter, &CodeRegion)> {
self.unreachable_regions.iter().map(|region| (Counter::zero(), region))
self.unreachable_regions.iter().map(|region| (Counter::ZERO, region))
}
}

@@ -1,13 +1,14 @@
use crate::common::CodegenCx;
use crate::coverageinfo;
use crate::coverageinfo::ffi::{Counter, CounterExpression, CounterMappingRegion};
use crate::coverageinfo::ffi::CounterMappingRegion;
use crate::coverageinfo::map_data::FunctionCoverage;
use crate::llvm;

use rustc_codegen_ssa::traits::ConstMethods;
use rustc_data_structures::fx::FxIndexSet;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_llvm::RustString;
use rustc_index::IndexVec;
use rustc_middle::bug;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::coverage::CodeRegion;
@@ -55,21 +56,21 @@ pub fn finalize(cx: &CodegenCx<'_, '_>) {
return;
}

let mut mapgen = CoverageMapGenerator::new(tcx);
let mut global_file_table = GlobalFileTable::new(tcx);

// Encode coverage mappings and generate function records
let mut function_data = Vec::new();
for (instance, function_coverage) in function_coverage_map {
for (instance, mut function_coverage) in function_coverage_map {
debug!("Generate function coverage for {}, {:?}", cx.codegen_unit.name(), instance);
function_coverage.simplify_expressions();
let function_coverage = function_coverage;

let mangled_function_name = tcx.symbol_name(instance).name;
let source_hash = function_coverage.source_hash();
let is_used = function_coverage.is_used();
let (expressions, counter_regions) =
function_coverage.get_expressions_and_counter_regions();

let coverage_mapping_buffer = llvm::build_byte_buffer(|coverage_mapping_buffer| {
mapgen.write_coverage_mapping(expressions, counter_regions, coverage_mapping_buffer);
});
let coverage_mapping_buffer =
encode_mappings_for_function(&mut global_file_table, &function_coverage);

if coverage_mapping_buffer.is_empty() {
if function_coverage.is_used() {
@@ -87,19 +88,14 @@ pub fn finalize(cx: &CodegenCx<'_, '_>) {
}

// Encode all filenames referenced by counters/expressions in this module
let filenames_buffer = llvm::build_byte_buffer(|filenames_buffer| {
coverageinfo::write_filenames_section_to_buffer(
mapgen.filenames.iter().map(Symbol::as_str),
filenames_buffer,
);
});
let filenames_buffer = global_file_table.into_filenames_buffer();

let filenames_size = filenames_buffer.len();
let filenames_val = cx.const_bytes(&filenames_buffer);
let filenames_ref = coverageinfo::hash_bytes(&filenames_buffer);

// Generate the LLVM IR representation of the coverage map and store it in a well-known global
let cov_data_val = mapgen.generate_coverage_map(cx, version, filenames_size, filenames_val);
let cov_data_val = generate_coverage_map(cx, version, filenames_size, filenames_val);

let covfun_section_name = coverageinfo::covfun_section_name(cx);
for (mangled_function_name, source_hash, is_used, coverage_mapping_buffer) in function_data {
@@ -118,13 +114,13 @@ pub fn finalize(cx: &CodegenCx<'_, '_>) {
coverageinfo::save_cov_data_to_mod(cx, cov_data_val);
}

struct CoverageMapGenerator {
filenames: FxIndexSet<Symbol>,
struct GlobalFileTable {
global_file_table: FxIndexSet<Symbol>,
}

impl CoverageMapGenerator {
impl GlobalFileTable {
fn new(tcx: TyCtxt<'_>) -> Self {
let mut filenames = FxIndexSet::default();
let mut global_file_table = FxIndexSet::default();
// LLVM Coverage Mapping Format version 6 (zero-based encoded as 5)
// requires setting the first filename to the compilation directory.
// Since rustc generates coverage maps with relative paths, the
@@ -133,94 +129,114 @@ impl CoverageMapGenerator {
let working_dir = Symbol::intern(
&tcx.sess.opts.working_dir.remapped_path_if_available().to_string_lossy(),
);
filenames.insert(working_dir);
Self { filenames }
global_file_table.insert(working_dir);
Self { global_file_table }
}

/// Using the `expressions` and `counter_regions` collected for the current function, generate
/// the `mapping_regions` and `virtual_file_mapping`, and capture any new filenames. Then use
/// LLVM APIs to encode the `virtual_file_mapping`, `expressions`, and `mapping_regions` into
/// the given `coverage_mapping` byte buffer, compliant with the LLVM Coverage Mapping format.
fn write_coverage_mapping<'a>(
&mut self,
expressions: Vec<CounterExpression>,
counter_regions: impl Iterator<Item = (Counter, &'a CodeRegion)>,
coverage_mapping_buffer: &RustString,
) {
let mut counter_regions = counter_regions.collect::<Vec<_>>();
if counter_regions.is_empty() {
return;
}
fn global_file_id_for_file_name(&mut self, file_name: Symbol) -> u32 {
let (global_file_id, _) = self.global_file_table.insert_full(file_name);
global_file_id as u32
}

let mut virtual_file_mapping = Vec::new();
let mut mapping_regions = Vec::new();
let mut current_file_name = None;
let mut current_file_id = 0;
fn into_filenames_buffer(self) -> Vec<u8> {
// This method takes `self` so that the caller can't accidentally
// modify the original file table after encoding it into a buffer.

// Convert the list of (Counter, CodeRegion) pairs to an array of `CounterMappingRegion`, sorted
// by filename and position. Capture any new files to compute the `CounterMappingRegion`s
// `file_id` (indexing files referenced by the current function), and construct the
// function-specific `virtual_file_mapping` from `file_id` to its index in the module's
// `filenames` array.
counter_regions.sort_unstable_by_key(|(_counter, region)| *region);
for (counter, region) in counter_regions {
let CodeRegion { file_name, start_line, start_col, end_line, end_col } = *region;
let same_file = current_file_name.is_some_and(|p| p == file_name);
if !same_file {
if current_file_name.is_some() {
current_file_id += 1;
}
current_file_name = Some(file_name);
debug!(" file_id: {} = '{:?}'", current_file_id, file_name);
let (filenames_index, _) = self.filenames.insert_full(file_name);
virtual_file_mapping.push(filenames_index as u32);
}
debug!("Adding counter {:?} to map for {:?}", counter, region);
llvm::build_byte_buffer(|buffer| {
coverageinfo::write_filenames_section_to_buffer(
self.global_file_table.iter().map(Symbol::as_str),
buffer,
);
})
}
}

/// Using the expressions and counter regions collected for a single function,
/// generate the variable-sized payload of its corresponding `__llvm_covfun`
/// entry. The payload is returned as a vector of bytes.
///
/// Newly-encountered filenames will be added to the global file table.
fn encode_mappings_for_function(
global_file_table: &mut GlobalFileTable,
function_coverage: &FunctionCoverage<'_>,
) -> Vec<u8> {
let (expressions, counter_regions) = function_coverage.get_expressions_and_counter_regions();

let mut counter_regions = counter_regions.collect::<Vec<_>>();
if counter_regions.is_empty() {
return Vec::new();
}

let mut virtual_file_mapping = IndexVec::<u32, u32>::new();
let mut mapping_regions = Vec::with_capacity(counter_regions.len());

// Sort the list of (counter, region) mapping pairs by region, so that they
// can be grouped by filename. Prepare file IDs for each filename, and
// prepare the mapping data so that we can pass it through FFI to LLVM.
counter_regions.sort_by_key(|(_counter, region)| *region);
for counter_regions_for_file in
counter_regions.group_by(|(_, a), (_, b)| a.file_name == b.file_name)
{
// Look up (or allocate) the global file ID for this filename.
let file_name = counter_regions_for_file[0].1.file_name;
let global_file_id = global_file_table.global_file_id_for_file_name(file_name);

// Associate that global file ID with a local file ID for this function.
let local_file_id: u32 = virtual_file_mapping.push(global_file_id);
debug!(" file id: local {local_file_id} => global {global_file_id} = '{file_name:?}'");

// For each counter/region pair in this function+file, convert it to a
// form suitable for FFI.
for &(counter, region) in counter_regions_for_file {
let CodeRegion { file_name: _, start_line, start_col, end_line, end_col } = *region;

debug!("Adding counter {counter:?} to map for {region:?}");
mapping_regions.push(CounterMappingRegion::code_region(
counter,
current_file_id,
local_file_id,
start_line,
start_col,
end_line,
end_col,
));
}
}

// Encode and append the current function's coverage mapping data
// Encode the function's coverage mappings into a buffer.
llvm::build_byte_buffer(|buffer| {
coverageinfo::write_mapping_to_buffer(
virtual_file_mapping,
virtual_file_mapping.raw,
expressions,
mapping_regions,
coverage_mapping_buffer,
buffer,
);
}
})
}

/// Construct coverage map header and the array of function records, and combine them into the
/// coverage map. Save the coverage map data into the LLVM IR as a static global using a
/// specific, well-known section and name.
fn generate_coverage_map<'ll>(
self,
cx: &CodegenCx<'ll, '_>,
version: u32,
filenames_size: usize,
filenames_val: &'ll llvm::Value,
) -> &'ll llvm::Value {
debug!("cov map: filenames_size = {}, 0-based version = {}", filenames_size, version);
/// Construct coverage map header and the array of function records, and combine them into the
/// coverage map. Save the coverage map data into the LLVM IR as a static global using a
/// specific, well-known section and name.
fn generate_coverage_map<'ll>(
cx: &CodegenCx<'ll, '_>,
version: u32,
filenames_size: usize,
filenames_val: &'ll llvm::Value,
) -> &'ll llvm::Value {
debug!("cov map: filenames_size = {}, 0-based version = {}", filenames_size, version);

// Create the coverage data header (Note, fields 0 and 2 are now always zero,
// as of `llvm::coverage::CovMapVersion::Version4`.)
let zero_was_n_records_val = cx.const_u32(0);
let filenames_size_val = cx.const_u32(filenames_size as u32);
let zero_was_coverage_size_val = cx.const_u32(0);
let version_val = cx.const_u32(version);
let cov_data_header_val = cx.const_struct(
&[zero_was_n_records_val, filenames_size_val, zero_was_coverage_size_val, version_val],
/*packed=*/ false,
);
// Create the coverage data header (Note, fields 0 and 2 are now always zero,
// as of `llvm::coverage::CovMapVersion::Version4`.)
let zero_was_n_records_val = cx.const_u32(0);
let filenames_size_val = cx.const_u32(filenames_size as u32);
let zero_was_coverage_size_val = cx.const_u32(0);
let version_val = cx.const_u32(version);
let cov_data_header_val = cx.const_struct(
&[zero_was_n_records_val, filenames_size_val, zero_was_coverage_size_val, version_val],
/*packed=*/ false,
);

// Create the complete LLVM coverage data value to add to the LLVM IR
cx.const_struct(&[cov_data_header_val, filenames_val], /*packed=*/ false)
}
// Create the complete LLVM coverage data value to add to the LLVM IR
cx.const_struct(&[cov_data_header_val, filenames_val], /*packed=*/ false)
}

/// Construct a function record and combine it with the function's coverage mapping data.
@@ -317,10 +333,10 @@ fn add_unused_functions(cx: &CodegenCx<'_, '_>) {
{
let codegen_fn_attrs = tcx.codegen_fn_attrs(non_codegenned_def_id);

// If a function is marked `#[no_coverage]`, then skip generating a
// If a function is marked `#[coverage(off)]`, then skip generating a
// dead code stub for it.
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NO_COVERAGE) {
debug!("skipping unused fn marked #[no_coverage]: {:?}", non_codegenned_def_id);
debug!("skipping unused fn marked #[coverage(off)]: {:?}", non_codegenned_def_id);
continue;
}

@@ -16,7 +16,7 @@ use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_llvm::RustString;
use rustc_middle::bug;
use rustc_middle::mir::coverage::{CodeRegion, CounterId, CoverageKind, ExpressionId, Op, Operand};
use rustc_middle::mir::coverage::{CounterId, CoverageKind};
use rustc_middle::mir::Coverage;
use rustc_middle::ty;
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt};
@@ -104,144 +104,67 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
fn add_coverage(&mut self, instance: Instance<'tcx>, coverage: &Coverage) {
let bx = self;

let Some(coverage_context) = bx.coverage_context() else { return };
let mut coverage_map = coverage_context.function_coverage_map.borrow_mut();
let func_coverage = coverage_map
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(bx.tcx(), instance));

let Coverage { kind, code_region } = coverage.clone();
match kind {
CoverageKind::Counter { function_source_hash, id } => {
if bx.set_function_source_hash(instance, function_source_hash) {
// If `set_function_source_hash()` returned true, the coverage map is enabled,
// so continue adding the counter.
if let Some(code_region) = code_region {
// Note: Some counters do not have code regions, but may still be referenced
// from expressions. In that case, don't add the counter to the coverage map,
// but do inject the counter intrinsic.
bx.add_coverage_counter(instance, id, code_region);
}
debug!(
"ensuring function source hash is set for instance={:?}; function_source_hash={}",
instance, function_source_hash,
);
func_coverage.set_function_source_hash(function_source_hash);

let coverageinfo = bx.tcx().coverageinfo(instance.def);

let fn_name = bx.get_pgo_func_name_var(instance);
let hash = bx.const_u64(function_source_hash);
let num_counters = bx.const_u32(coverageinfo.num_counters);
let index = bx.const_u32(id.as_u32());
if let Some(code_region) = code_region {
// Note: Some counters do not have code regions, but may still be referenced
// from expressions. In that case, don't add the counter to the coverage map,
// but do inject the counter intrinsic.
debug!(
"codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
fn_name, hash, num_counters, index,
"adding counter to coverage_map: instance={:?}, id={:?}, region={:?}",
instance, id, code_region,
);
bx.instrprof_increment(fn_name, hash, num_counters, index);
func_coverage.add_counter(id, code_region);
}
// We need to explicitly drop the `RefMut` before calling into `instrprof_increment`,
// as that needs an exclusive borrow.
drop(coverage_map);

let coverageinfo = bx.tcx().coverageinfo(instance.def);

let fn_name = bx.get_pgo_func_name_var(instance);
let hash = bx.const_u64(function_source_hash);
let num_counters = bx.const_u32(coverageinfo.num_counters);
let index = bx.const_u32(id.as_u32());
debug!(
"codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
fn_name, hash, num_counters, index,
);
bx.instrprof_increment(fn_name, hash, num_counters, index);
}
CoverageKind::Expression { id, lhs, op, rhs } => {
bx.add_coverage_counter_expression(instance, id, lhs, op, rhs, code_region);
debug!(
"adding counter expression to coverage_map: instance={:?}, id={:?}, {:?} {:?} {:?}; region: {:?}",
instance, id, lhs, op, rhs, code_region,
);
func_coverage.add_counter_expression(id, lhs, op, rhs, code_region);
}
CoverageKind::Unreachable => {
bx.add_coverage_unreachable(
instance,
code_region.expect("unreachable regions always have code regions"),
let code_region =
code_region.expect("unreachable regions always have code regions");
debug!(
"adding unreachable code to coverage_map: instance={:?}, at {:?}",
instance, code_region,
);
func_coverage.add_unreachable_region(code_region);
}
}
}
}

// These methods used to be part of trait `CoverageInfoBuilderMethods`, but
// after moving most coverage code out of SSA they are now just ordinary methods.
impl<'tcx> Builder<'_, '_, 'tcx> {
/// Returns true if the function source hash was added to the coverage map (even if it had
/// already been added, for this instance). Returns false *only* if `-C instrument-coverage` is
/// not enabled (a coverage map is not being generated).
fn set_function_source_hash(
&mut self,
instance: Instance<'tcx>,
function_source_hash: u64,
) -> bool {
if let Some(coverage_context) = self.coverage_context() {
debug!(
"ensuring function source hash is set for instance={:?}; function_source_hash={}",
instance, function_source_hash,
);
let mut coverage_map = coverage_context.function_coverage_map.borrow_mut();
coverage_map
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.set_function_source_hash(function_source_hash);
true
} else {
false
}
}

/// Returns true if the counter was added to the coverage map; false if `-C instrument-coverage`
/// is not enabled (a coverage map is not being generated).
fn add_coverage_counter(
&mut self,
instance: Instance<'tcx>,
id: CounterId,
region: CodeRegion,
) -> bool {
if let Some(coverage_context) = self.coverage_context() {
debug!(
"adding counter to coverage_map: instance={:?}, id={:?}, region={:?}",
instance, id, region,
);
let mut coverage_map = coverage_context.function_coverage_map.borrow_mut();
coverage_map
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter(id, region);
true
} else {
false
}
}

/// Returns true if the expression was added to the coverage map; false if
/// `-C instrument-coverage` is not enabled (a coverage map is not being generated).
fn add_coverage_counter_expression(
&mut self,
instance: Instance<'tcx>,
id: ExpressionId,
lhs: Operand,
op: Op,
rhs: Operand,
region: Option<CodeRegion>,
) -> bool {
if let Some(coverage_context) = self.coverage_context() {
debug!(
"adding counter expression to coverage_map: instance={:?}, id={:?}, {:?} {:?} {:?}; \
region: {:?}",
instance, id, lhs, op, rhs, region,
);
let mut coverage_map = coverage_context.function_coverage_map.borrow_mut();
coverage_map
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter_expression(id, lhs, op, rhs, region);
true
} else {
false
}
}

/// Returns true if the region was added to the coverage map; false if `-C instrument-coverage`
/// is not enabled (a coverage map is not being generated).
fn add_coverage_unreachable(&mut self, instance: Instance<'tcx>, region: CodeRegion) -> bool {
if let Some(coverage_context) = self.coverage_context() {
debug!(
"adding unreachable code to coverage_map: instance={:?}, at {:?}",
instance, region,
);
let mut coverage_map = coverage_context.function_coverage_map.borrow_mut();
coverage_map
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_unreachable_region(region);
true
} else {
false
}
}
}

fn declare_unused_fn<'tcx>(cx: &CodegenCx<'_, 'tcx>, def_id: DefId) -> Instance<'tcx> {
let tcx = cx.tcx;

@@ -20,7 +20,7 @@ pub fn compute_mir_scopes<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
instance: Instance<'tcx>,
mir: &Body<'tcx>,
debug_context: &mut FunctionDebugContext<&'ll DIScope, &'ll DILocation>,
debug_context: &mut FunctionDebugContext<'tcx, &'ll DIScope, &'ll DILocation>,
) {
// Find all scopes with variables defined in them.
let variables = if cx.sess().opts.debuginfo == DebugInfo::Full {
@@ -51,7 +51,7 @@ fn make_mir_scope<'ll, 'tcx>(
instance: Instance<'tcx>,
mir: &Body<'tcx>,
variables: &Option<BitSet<SourceScope>>,
debug_context: &mut FunctionDebugContext<&'ll DIScope, &'ll DILocation>,
debug_context: &mut FunctionDebugContext<'tcx, &'ll DIScope, &'ll DILocation>,
instantiated: &mut BitSet<SourceScope>,
scope: SourceScope,
) {
@@ -68,7 +68,7 @@ fn make_mir_scope<'ll, 'tcx>(
let file = cx.sess().source_map().lookup_source_file(mir.span.lo());
debug_context.scopes[scope] = DebugScope {
file_start_pos: file.start_pos,
file_end_pos: file.end_pos,
file_end_pos: file.end_position(),
..debug_context.scopes[scope]
};
instantiated.insert(scope);
@@ -86,27 +86,31 @@ fn make_mir_scope<'ll, 'tcx>(
let loc = cx.lookup_debug_loc(scope_data.span.lo());
let file_metadata = file_metadata(cx, &loc.file);

let dbg_scope = match scope_data.inlined {
let parent_dbg_scope = match scope_data.inlined {
Some((callee, _)) => {
// FIXME(eddyb) this would be `self.monomorphize(&callee)`
// if this is moved to `rustc_codegen_ssa::mir::debuginfo`.
let callee = cx.tcx.subst_and_normalize_erasing_regions(
let callee = cx.tcx.instantiate_and_normalize_erasing_regions(
instance.args,
ty::ParamEnv::reveal_all(),
ty::EarlyBinder::bind(callee),
);
let callee_fn_abi = cx.fn_abi_of_instance(callee, ty::List::empty());
cx.dbg_scope_fn(callee, callee_fn_abi, None)
debug_context.inlined_function_scopes.entry(callee).or_insert_with(|| {
let callee_fn_abi = cx.fn_abi_of_instance(callee, ty::List::empty());
cx.dbg_scope_fn(callee, callee_fn_abi, None)
})
}
None => unsafe {
llvm::LLVMRustDIBuilderCreateLexicalBlock(
DIB(cx),
parent_scope.dbg_scope,
file_metadata,
loc.line,
loc.col,
)
},
None => parent_scope.dbg_scope,
};

let dbg_scope = unsafe {
llvm::LLVMRustDIBuilderCreateLexicalBlock(
DIB(cx),
parent_dbg_scope,
file_metadata,
loc.line,
loc.col,
)
};

let inlined_at = scope_data.inlined.map(|(_, callsite_span)| {
@@ -120,7 +124,7 @@ fn make_mir_scope<'ll, 'tcx>(
dbg_scope,
inlined_at: inlined_at.or(parent_scope.inlined_at),
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_pos,
file_end_pos: loc.file.end_position(),
};
instantiated.insert(scope);
}

@@ -445,9 +445,9 @@ pub fn type_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll D
ty::RawPtr(ty::TypeAndMut { ty: pointee_type, .. }) | ty::Ref(_, pointee_type, _) => {
build_pointer_or_reference_di_node(cx, t, pointee_type, unique_type_id)
}
// Box<T, A> may have a non-ZST allocator A. In that case, we
// Box<T, A> may have a non-1-ZST allocator A. In that case, we
// cannot treat Box<T, A> as just an owned alias of `*mut T`.
ty::Adt(def, args) if def.is_box() && cx.layout_of(args.type_at(1)).is_zst() => {
ty::Adt(def, args) if def.is_box() && cx.layout_of(args.type_at(1)).is_1zst() => {
build_pointer_or_reference_di_node(cx, t, t.boxed_ty(), unique_type_id)
}
ty::FnDef(..) | ty::FnPtr(_) => build_subroutine_type_di_node(cx, unique_type_id),

@@ -263,11 +263,11 @@ impl CodegenCx<'_, '_> {
pub fn lookup_debug_loc(&self, pos: BytePos) -> DebugLoc {
let (file, line, col) = match self.sess().source_map().lookup_line(pos) {
Ok(SourceFileAndLine { sf: file, line }) => {
let line_pos = file.lines(|lines| lines[line]);
let line_pos = file.lines()[line];

// Use 1-based indexing.
let line = (line + 1) as u32;
let col = (pos - line_pos).to_u32() + 1;
let col = (file.relative_position(pos) - line_pos).to_u32() + 1;

(file, line, col)
}
@@ -292,7 +292,7 @@ impl<'ll, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
llfn: &'ll Value,
mir: &mir::Body<'tcx>,
) -> Option<FunctionDebugContext<&'ll DIScope, &'ll DILocation>> {
) -> Option<FunctionDebugContext<'tcx, &'ll DIScope, &'ll DILocation>> {
if self.sess().opts.debuginfo == DebugInfo::None {
return None;
}
@@ -304,8 +304,10 @@ impl<'ll, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
file_start_pos: BytePos(0),
file_end_pos: BytePos(0),
};
let mut fn_debug_context =
FunctionDebugContext { scopes: IndexVec::from_elem(empty_scope, &mir.source_scopes) };
let mut fn_debug_context = FunctionDebugContext {
scopes: IndexVec::from_elem(empty_scope, &mir.source_scopes),
inlined_function_scopes: Default::default(),
};

// Fill in all the scopes, with the information from the MIR body.
compute_mir_scopes(self, instance, mir, &mut fn_debug_context);
@@ -347,6 +349,7 @@ impl<'ll, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
type_names::push_generic_params(
tcx,
tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), args),
enclosing_fn_def_id,
&mut name,
);

@@ -526,7 +529,7 @@ impl<'ll, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
if let Some(impl_def_id) = cx.tcx.impl_of_method(instance.def_id()) {
// If the method does *not* belong to a trait, proceed
if cx.tcx.trait_id_of_impl(impl_def_id).is_none() {
let impl_self_ty = cx.tcx.subst_and_normalize_erasing_regions(
let impl_self_ty = cx.tcx.instantiate_and_normalize_erasing_regions(
instance.args,
ty::ParamEnv::reveal_all(),
cx.tcx.type_of(impl_def_id),