diff --git a/compiler/rustc_graphviz/src/lib.rs b/compiler/rustc_graphviz/src/lib.rs
index 3c1bb5532661a..401d3f6689c99 100644
--- a/compiler/rustc_graphviz/src/lib.rs
+++ b/compiler/rustc_graphviz/src/lib.rs
@@ -471,7 +471,11 @@ pub trait Labeller<'a> {
/// Escape tags in such a way that it is suitable for inclusion in a
/// Graphviz HTML label.
pub fn escape_html(s: &str) -> String {
- s.replace('&', "&").replace('\"', """).replace('<', "<").replace('>', ">")
+ s.replace('&', "&")
+ .replace('\"', """)
+ .replace('<', "<")
+ .replace('>', ">")
+ .replace('\n', "
")
}
impl<'a> LabelText<'a> {
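
A quick standalone check of what the new escaping produces; the function body below is copied from the hunk above so the snippet runs on its own (the sample input is hypothetical):

```rust
fn escape_html(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('\"', "&quot;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('\n', "<br align=\"left\"/>")
}

fn main() {
    // Newlines now become left-aligned <br/> elements, so callers no longer
    // have to post-process the result before embedding it in an HTML label.
    assert_eq!(escape_html("a < b\n"), r#"a &lt; b<br align="left"/>"#);
}
```
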
diff --git a/compiler/rustc_middle/src/mir/generic_graphviz.rs b/compiler/rustc_middle/src/mir/generic_graphviz.rs
index 11ac45943ac58..ccae7e159b1f1 100644
--- a/compiler/rustc_middle/src/mir/generic_graphviz.rs
+++ b/compiler/rustc_middle/src/mir/generic_graphviz.rs
@@ -126,7 +126,7 @@ impl<
write!(
w,
r#"
{} |
"#,
- dot::escape_html(&section).replace('\n', "<br/>")
+ dot::escape_html(&section)
)?;
}
@@ -147,7 +147,7 @@ impl<
let src = self.node(source);
let trg = self.node(target);
let escaped_edge_label = if let Some(edge_label) = edge_labels.get(index) {
- dot::escape_html(edge_label).replace('\n', r#"<br align="left"/>"#)
+ dot::escape_html(edge_label)
} else {
"".to_owned()
};
@@ -162,8 +162,7 @@ impl<
where
W: Write,
{
- let lines = label.split('\n').map(|s| dot::escape_html(s)).collect::<Vec<_>>();
- let escaped_label = lines.join(r#"<br align="left"/>"#);
+ let escaped_label = dot::escape_html(label);
writeln!(w, r#" label=<
{}
>;"#, escaped_label)
}
diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs
index ddcf3711bfc95..d87eb28970e41 100644
--- a/compiler/rustc_middle/src/mir/visit.rs
+++ b/compiler/rustc_middle/src/mir/visit.rs
@@ -1320,6 +1320,15 @@ impl PlaceContext {
)
}
+ /// Returns `true` if this place context represents an address-of.
+ pub fn is_address_of(&self) -> bool {
+ matches!(
+ self,
+ PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
+ | PlaceContext::MutatingUse(MutatingUseContext::AddressOf)
+ )
+ }
+
/// Returns `true` if this place context represents a storage live or storage dead marker.
#[inline]
pub fn is_storage_marker(&self) -> bool {
diff --git a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
index 579fe68a14935..c9d5601f2074c 100644
--- a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
@@ -475,7 +475,10 @@ where
r#"{state} | "#,
colspan = this.style.num_state_columns(),
fmt = fmt,
- state = format!("{:?}", DebugWithAdapter { this: state, ctxt: analysis }),
+ state = dot::escape_html(&format!(
+ "{:?}",
+ DebugWithAdapter { this: state, ctxt: analysis }
+ )),
)
})
}
diff --git a/compiler/rustc_mir_dataflow/src/framework/lattice.rs b/compiler/rustc_mir_dataflow/src/framework/lattice.rs
index d6b89eb82275e..f0e75c53ea15c 100644
--- a/compiler/rustc_mir_dataflow/src/framework/lattice.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/lattice.rs
@@ -73,6 +73,16 @@ pub trait MeetSemiLattice: Eq {
fn meet(&mut self, other: &Self) -> bool;
}
+/// A set that has a "bottom" element, which is less than or equal to any other element.
+pub trait HasBottom {
+ fn bottom() -> Self;
+}
+
+/// A set that has a "top" element, which is greater than or equal to any other element.
+pub trait HasTop {
+ fn top() -> Self;
+}
+
/// A `bool` is a "two-point" lattice with `true` as the top element and `false` as the bottom:
///
/// ```text
@@ -102,6 +112,18 @@ impl MeetSemiLattice for bool {
}
}
+impl HasBottom for bool {
+ fn bottom() -> Self {
+ false
+ }
+}
+
+impl HasTop for bool {
+ fn top() -> Self {
+ true
+ }
+}
+
/// A tuple (or list) of lattices is itself a lattice whose least upper bound is the concatenation
/// of the least upper bounds of each element of the tuple (or list).
///
@@ -250,3 +272,15 @@ impl<T: Clone + Eq> MeetSemiLattice for FlatSet<T> {
true
}
}
+
+impl<T> HasBottom for FlatSet<T> {
+ fn bottom() -> Self {
+ Self::Bottom
+ }
+}
+
+impl<T> HasTop for FlatSet<T> {
+ fn top() -> Self {
+ Self::Top
+ }
+}
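
As a standalone sketch of how `HasBottom`/`HasTop` pair with a flat lattice, the following repeats simplified trait and enum definitions (mirroring, but not reusing, the compiler's `FlatSet<T>` above) so it compiles outside the compiler tree:

```rust
// Minimal sketch: a flat lattice with explicit bottom/top constructors,
// mirroring the `HasBottom`/`HasTop` impls added for `FlatSet<T>` above.
trait HasBottom { fn bottom() -> Self; }
trait HasTop { fn top() -> Self; }

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Flat<T> {
    Bottom,   // no value observed yet (unreachable/uninitialized)
    Elem(T),  // exactly one known value
    Top,      // conflicting values, nothing is known
}

impl<T> HasBottom for Flat<T> { fn bottom() -> Self { Flat::Bottom } }
impl<T> HasTop for Flat<T> { fn top() -> Self { Flat::Top } }

// Join two elements: Bottom is the identity, differing constants collapse to Top.
fn join<T: PartialEq>(a: Flat<T>, b: Flat<T>) -> Flat<T> {
    match (a, b) {
        (Flat::Bottom, x) | (x, Flat::Bottom) => x,
        (Flat::Elem(x), Flat::Elem(y)) if x == y => Flat::Elem(x),
        _ => Flat::Top,
    }
}

fn main() {
    assert_eq!(join(Flat::bottom(), Flat::Elem(1)), Flat::Elem(1));
    assert_eq!(join(Flat::Elem(1), Flat::Elem(2)), Flat::<i32>::top());
}
```
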
diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs
index b471d04fd606f..7f40cfca32fff 100644
--- a/compiler/rustc_mir_dataflow/src/lib.rs
+++ b/compiler/rustc_mir_dataflow/src/lib.rs
@@ -41,6 +41,7 @@ pub mod move_paths;
pub mod rustc_peek;
pub mod storage;
pub mod un_derefer;
+pub mod value_analysis;
pub(crate) mod indexes {
pub(crate) use super::move_paths::MovePathIndex;
diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs
new file mode 100644
index 0000000000000..db4b0a3deda9d
--- /dev/null
+++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs
@@ -0,0 +1,927 @@
+//! This module provides a framework on top of the normal MIR dataflow framework to simplify the
+//! implementation of analyses that track information about the values stored in certain places.
+//! We are using the term "place" here to refer to a `mir::Place` (a place expression) instead of
+//! an `interpret::Place` (a memory location).
+//!
+//! The default methods of [`ValueAnalysis`] (prefixed with `super_` instead of `handle_`)
+//! provide some behavior that should be valid for all abstract domains that are based only on the
+//! value stored in a certain place. On top of these default rules, an implementation should
+//! override some of the `handle_` methods. For an example, see `ConstAnalysis`.
+//!
+//! An implementation must also provide a [`Map`]. Before the analysis begins, all places that
+//! should be tracked during the analysis must be registered. During the analysis, no new places
+//! can be registered. The [`State`] can be queried to retrieve the abstract value stored for a
+//! certain place by passing the map.
+//!
+//! This framework is currently experimental. Originally, it supported shared references and enum
+//! variants. However, it was discovered that both of these were unsound, and especially references
+//! had subtle but serious issues. In the future, they could be added back in, but we should clarify
+//! the rules for optimizations that rely on the aliasing model first.
+//!
+//!
+//! # Notes
+//!
+//! - The bottom state denotes uninitialized memory. Because we are only doing a sound approximation
+//! of the actual execution, we can also use this state for places where access would be UB.
+//!
+//! - The assignment logic in `State::assign_place_idx` assumes that the places are non-overlapping,
+//! or identical. Note that this refers to place expressions, not memory locations.
+//!
+//! - Currently, places that have their reference taken cannot be tracked. Although this would be
+//! possible, it has to rely on some aliasing model, which we are not ready to commit to yet.
+//! Because of that, we can assume that the only way to change the value behind a tracked place is
+//! by direct assignment.
+
+use std::fmt::{Debug, Formatter};
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_index::vec::IndexVec;
+use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
+use rustc_middle::mir::*;
+use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_target::abi::VariantIdx;
+
+use crate::lattice::{HasBottom, HasTop};
+use crate::{
+ fmt::DebugWithContext, Analysis, AnalysisDomain, CallReturnPlaces, JoinSemiLattice,
+ SwitchIntEdgeEffects,
+};
+
+pub trait ValueAnalysis<'tcx> {
+ /// For each place of interest, the analysis tracks a value of the given type.
+ type Value: Clone + JoinSemiLattice + HasBottom + HasTop;
+
+ const NAME: &'static str;
+
+ fn map(&self) -> &Map;
+
+ fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
+ self.super_statement(statement, state)
+ }
+
+ fn super_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
+ match &statement.kind {
+ StatementKind::Assign(box (place, rvalue)) => {
+ self.handle_assign(*place, rvalue, state);
+ }
+ StatementKind::SetDiscriminant { .. } => {
+ // Could treat this as writing a constant to a pseudo-place.
+ // But discriminants are currently not tracked, so we do nothing.
+ // Related: https://github.com/rust-lang/unsafe-code-guidelines/issues/84
+ }
+ StatementKind::Intrinsic(box intrinsic) => {
+ self.handle_intrinsic(intrinsic, state);
+ }
+ StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
+ // StorageLive leaves the local in an uninitialized state.
+ // StorageDead makes it UB to access the local afterwards.
+ state.flood_with(Place::from(*local).as_ref(), self.map(), Self::Value::bottom());
+ }
+ StatementKind::Deinit(box place) => {
+ // Deinit makes the place uninitialized.
+ state.flood_with(place.as_ref(), self.map(), Self::Value::bottom());
+ }
+ StatementKind::Retag(..) => {
+ // We don't track references.
+ }
+ StatementKind::Nop
+ | StatementKind::FakeRead(..)
+ | StatementKind::Coverage(..)
+ | StatementKind::AscribeUserType(..) => (),
+ }
+ }
+
+ fn handle_intrinsic(
+ &self,
+ intrinsic: &NonDivergingIntrinsic<'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ self.super_intrinsic(intrinsic, state);
+ }
+
+ fn super_intrinsic(
+ &self,
+ intrinsic: &NonDivergingIntrinsic<'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ match intrinsic {
+ NonDivergingIntrinsic::Assume(..) => {
+ // Could use this, but ignoring it is sound.
+ }
+ NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping { dst, .. }) => {
+ if let Some(place) = dst.place() {
+ state.flood(place.as_ref(), self.map());
+ }
+ }
+ }
+ }
+
+ fn handle_assign(
+ &self,
+ target: Place<'tcx>,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ self.super_assign(target, rvalue, state)
+ }
+
+ fn super_assign(
+ &self,
+ target: Place<'tcx>,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ let result = self.handle_rvalue(rvalue, state);
+ state.assign(target.as_ref(), result, self.map());
+ }
+
+ fn handle_rvalue(
+ &self,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> ValueOrPlace<Self::Value> {
+ self.super_rvalue(rvalue, state)
+ }
+
+ fn super_rvalue(
+ &self,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> ValueOrPlace<Self::Value> {
+ match rvalue {
+ Rvalue::Use(operand) => self.handle_operand(operand, state),
+ Rvalue::CopyForDeref(place) => self.handle_operand(&Operand::Copy(*place), state),
+ Rvalue::Ref(..) | Rvalue::AddressOf(..) => {
+ // We don't track such places.
+ ValueOrPlace::top()
+ }
+ Rvalue::Repeat(..)
+ | Rvalue::ThreadLocalRef(..)
+ | Rvalue::Len(..)
+ | Rvalue::Cast(..)
+ | Rvalue::BinaryOp(..)
+ | Rvalue::CheckedBinaryOp(..)
+ | Rvalue::NullaryOp(..)
+ | Rvalue::UnaryOp(..)
+ | Rvalue::Discriminant(..)
+ | Rvalue::Aggregate(..)
+ | Rvalue::ShallowInitBox(..) => {
+ // No modification is possible through these r-values.
+ ValueOrPlace::top()
+ }
+ }
+ }
+
+ fn handle_operand(
+ &self,
+ operand: &Operand<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> ValueOrPlace<Self::Value> {
+ self.super_operand(operand, state)
+ }
+
+ fn super_operand(
+ &self,
+ operand: &Operand<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> ValueOrPlace<Self::Value> {
+ match operand {
+ Operand::Constant(box constant) => {
+ ValueOrPlace::Value(self.handle_constant(constant, state))
+ }
+ Operand::Copy(place) | Operand::Move(place) => {
+ // On move, we would ideally flood the place with bottom. But with the current
+ // framework this is not possible (similar to `InterpCx::eval_operand`).
+ self.map()
+ .find(place.as_ref())
+ .map(ValueOrPlace::Place)
+ .unwrap_or(ValueOrPlace::top())
+ }
+ }
+ }
+
+ fn handle_constant(
+ &self,
+ constant: &Constant<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> Self::Value {
+ self.super_constant(constant, state)
+ }
+
+ fn super_constant(
+ &self,
+ _constant: &Constant<'tcx>,
+ _state: &mut State<Self::Value>,
+ ) -> Self::Value {
+ Self::Value::top()
+ }
+
+ /// The effect of a successful function call return should not be
+ /// applied here, see [`Analysis::apply_terminator_effect`].
+ fn handle_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
+ self.super_terminator(terminator, state)
+ }
+
+ fn super_terminator(&self, terminator: &Terminator<'tcx>, _state: &mut State<Self::Value>) {
+ match &terminator.kind {
+ TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
+ // Effect is applied by `handle_call_return`.
+ }
+ TerminatorKind::Drop { .. } => {
+ // We don't track dropped places.
+ }
+ TerminatorKind::DropAndReplace { .. } | TerminatorKind::Yield { .. } => {
+ // They would have an effect, but are not allowed in this phase.
+ bug!("encountered disallowed terminator");
+ }
+ TerminatorKind::Goto { .. }
+ | TerminatorKind::SwitchInt { .. }
+ | TerminatorKind::Resume
+ | TerminatorKind::Abort
+ | TerminatorKind::Return
+ | TerminatorKind::Unreachable
+ | TerminatorKind::Assert { .. }
+ | TerminatorKind::GeneratorDrop
+ | TerminatorKind::FalseEdge { .. }
+ | TerminatorKind::FalseUnwind { .. } => {
+ // These terminators have no effect on the analysis.
+ }
+ }
+ }
+
+ fn handle_call_return(
+ &self,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ self.super_call_return(return_places, state)
+ }
+
+ fn super_call_return(
+ &self,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ return_places.for_each(|place| {
+ state.flood(place.as_ref(), self.map());
+ })
+ }
+
+ fn handle_switch_int(
+ &self,
+ discr: &Operand<'tcx>,
+ apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
+ ) {
+ self.super_switch_int(discr, apply_edge_effects)
+ }
+
+ fn super_switch_int(
+ &self,
+ _discr: &Operand<'tcx>,
+ _apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
+ ) {
+ }
+
+ fn wrap(self) -> ValueAnalysisWrapper<Self>
+ where
+ Self: Sized,
+ {
+ ValueAnalysisWrapper(self)
+ }
+}
+
+pub struct ValueAnalysisWrapper<T>(pub T);
+
+impl<'tcx, T: ValueAnalysis<'tcx>> AnalysisDomain<'tcx> for ValueAnalysisWrapper<T> {
+ type Domain = State<T::Value>;
+
+ type Direction = crate::Forward;
+
+ const NAME: &'static str = T::NAME;
+
+ fn bottom_value(&self, _body: &Body<'tcx>) -> Self::Domain {
+ State(StateData::Unreachable)
+ }
+
+ fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) {
+ // The initial state maps all tracked places of argument projections to ⊤ and the rest to ⊥.
+ assert!(matches!(state.0, StateData::Unreachable));
+ let values = IndexVec::from_elem_n(T::Value::bottom(), self.0.map().value_count);
+ *state = State(StateData::Reachable(values));
+ for arg in body.args_iter() {
+ state.flood(PlaceRef { local: arg, projection: &[] }, self.0.map());
+ }
+ }
+}
+
+impl<'tcx, T> Analysis<'tcx> for ValueAnalysisWrapper<T>
+where
+ T: ValueAnalysis<'tcx>,
+{
+ fn apply_statement_effect(
+ &self,
+ state: &mut Self::Domain,
+ statement: &Statement<'tcx>,
+ _location: Location,
+ ) {
+ if state.is_reachable() {
+ self.0.handle_statement(statement, state);
+ }
+ }
+
+ fn apply_terminator_effect(
+ &self,
+ state: &mut Self::Domain,
+ terminator: &Terminator<'tcx>,
+ _location: Location,
+ ) {
+ if state.is_reachable() {
+ self.0.handle_terminator(terminator, state);
+ }
+ }
+
+ fn apply_call_return_effect(
+ &self,
+ state: &mut Self::Domain,
+ _block: BasicBlock,
+ return_places: crate::CallReturnPlaces<'_, 'tcx>,
+ ) {
+ if state.is_reachable() {
+ self.0.handle_call_return(return_places, state)
+ }
+ }
+
+ fn apply_switch_int_edge_effects(
+ &self,
+ _block: BasicBlock,
+ discr: &Operand<'tcx>,
+ apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
+ ) {
+ // FIXME: Dataflow framework provides no access to current state here.
+ self.0.handle_switch_int(discr, apply_edge_effects)
+ }
+}
+
+rustc_index::newtype_index!(
+ /// This index uniquely identifies a place.
+ ///
+ /// Not every place has a `PlaceIndex`, and not every `PlaceIndex` corresponds to a tracked
+ /// place. However, every tracked place and all places along its projection have a `PlaceIndex`.
+ pub struct PlaceIndex {}
+);
+
+rustc_index::newtype_index!(
+ /// This index uniquely identifies a tracked place and therefore a slot in [`State`].
+ ///
+ /// It is an implementation detail of this module.
+ struct ValueIndex {}
+);
+
+/// See [`State`].
+#[derive(PartialEq, Eq, Debug)]
+enum StateData<V> {
+ Reachable(IndexVec<ValueIndex, V>),
+ Unreachable,
+}
+
+impl<V: Clone> Clone for StateData<V> {
+ fn clone(&self) -> Self {
+ match self {
+ Self::Reachable(x) => Self::Reachable(x.clone()),
+ Self::Unreachable => Self::Unreachable,
+ }
+ }
+
+ fn clone_from(&mut self, source: &Self) {
+ match (&mut *self, source) {
+ (Self::Reachable(x), Self::Reachable(y)) => {
+ // We go through `raw` here, because `IndexVec` currently has a naive `clone_from`.
+ x.raw.clone_from(&y.raw);
+ }
+ _ => *self = source.clone(),
+ }
+ }
+}
+
+/// The dataflow state for an instance of [`ValueAnalysis`].
+///
+/// Every instance specifies a lattice that represents the possible values of a single tracked
+/// place. If we call this lattice `V` and the set of tracked places `P`, then a [`State`] is an
+/// element of `{unreachable} ∪ (P -> V)`. This again forms a lattice, where the bottom element is
+/// `unreachable` and the top element is the mapping `p ↦ ⊤`. Note that the mapping `p ↦ ⊥` is not
+/// the bottom element (because joining an unreachable and any other reachable state yields a
+/// reachable state). All operations on unreachable states are ignored.
+///
+/// Flooding means assigning a value (by default `⊤`) to all tracked projections of a given place.
+#[derive(PartialEq, Eq, Debug)]
+pub struct State<V>(StateData<V>);
+
+impl<V: Clone> Clone for State<V> {
+ fn clone(&self) -> Self {
+ Self(self.0.clone())
+ }
+
+ fn clone_from(&mut self, source: &Self) {
+ self.0.clone_from(&source.0);
+ }
+}
+
+impl<V: Clone + HasTop + HasBottom> State<V> {
+ pub fn is_reachable(&self) -> bool {
+ matches!(&self.0, StateData::Reachable(_))
+ }
+
+ pub fn mark_unreachable(&mut self) {
+ self.0 = StateData::Unreachable;
+ }
+
+ pub fn flood_all(&mut self) {
+ self.flood_all_with(V::top())
+ }
+
+ pub fn flood_all_with(&mut self, value: V) {
+ let StateData::Reachable(values) = &mut self.0 else { return };
+ values.raw.fill(value);
+ }
+
+ pub fn flood_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) {
+ if let Some(root) = map.find(place) {
+ self.flood_idx_with(root, map, value);
+ }
+ }
+
+ pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map) {
+ self.flood_with(place, map, V::top())
+ }
+
+ pub fn flood_idx_with(&mut self, place: PlaceIndex, map: &Map, value: V) {
+ let StateData::Reachable(values) = &mut self.0 else { return };
+ map.preorder_invoke(place, &mut |place| {
+ if let Some(vi) = map.places[place].value_index {
+ values[vi] = value.clone();
+ }
+ });
+ }
+
+ pub fn flood_idx(&mut self, place: PlaceIndex, map: &Map) {
+ self.flood_idx_with(place, map, V::top())
+ }
+
+ /// Copies `source` to `target`, including all tracked places beneath.
+ ///
+ /// If `target` contains a place that is not contained in `source`, it will be overwritten with
+ /// Top. Also, because this will copy all entries one after another, it may only be used for
+ /// places that are non-overlapping or identical.
+ pub fn assign_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) {
+ let StateData::Reachable(values) = &mut self.0 else { return };
+
+ // If both places are tracked, we copy the value to the target. If the target is tracked,
+ // but the source is not, we have to invalidate the value in target. If the target is not
+ // tracked, then we don't have to do anything.
+ if let Some(target_value) = map.places[target].value_index {
+ if let Some(source_value) = map.places[source].value_index {
+ values[target_value] = values[source_value].clone();
+ } else {
+ values[target_value] = V::top();
+ }
+ }
+ for target_child in map.children(target) {
+ // Try to find corresponding child and recurse. Reasoning is similar as above.
+ let projection = map.places[target_child].proj_elem.unwrap();
+ if let Some(source_child) = map.projections.get(&(source, projection)) {
+ self.assign_place_idx(target_child, *source_child, map);
+ } else {
+ self.flood_idx(target_child, map);
+ }
+ }
+ }
+
+ pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlace<V>, map: &Map) {
+ if let Some(target) = map.find(target) {
+ self.assign_idx(target, result, map);
+ } else {
+ // We don't track this place nor any projections, assignment can be ignored.
+ }
+ }
+
+ pub fn assign_idx(&mut self, target: PlaceIndex, result: ValueOrPlace<V>, map: &Map) {
+ match result {
+ ValueOrPlace::Value(value) => {
+ // First flood the target place in case we also track any projections (although
+ // this scenario is currently not well-supported by the API).
+ self.flood_idx(target, map);
+ let StateData::Reachable(values) = &mut self.0 else { return };
+ if let Some(value_index) = map.places[target].value_index {
+ values[value_index] = value;
+ }
+ }
+ ValueOrPlace::Place(source) => self.assign_place_idx(target, source, map),
+ }
+ }
+
+ /// Retrieve the value stored for a place, or ⊤ if it is not tracked.
+ pub fn get(&self, place: PlaceRef<'_>, map: &Map) -> V {
+ map.find(place).map(|place| self.get_idx(place, map)).unwrap_or(V::top())
+ }
+
+ /// Retrieve the value stored for a place index, or ⊤ if it is not tracked.
+ pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V {
+ match &self.0 {
+ StateData::Reachable(values) => {
+ map.places[place].value_index.map(|v| values[v].clone()).unwrap_or(V::top())
+ }
+ StateData::Unreachable => {
+ // Because this is unreachable, we can return any value we want.
+ V::bottom()
+ }
+ }
+ }
+}
+
+impl<V: JoinSemiLattice + Clone> JoinSemiLattice for State<V> {
+ fn join(&mut self, other: &Self) -> bool {
+ match (&mut self.0, &other.0) {
+ (_, StateData::Unreachable) => false,
+ (StateData::Unreachable, _) => {
+ *self = other.clone();
+ true
+ }
+ (StateData::Reachable(this), StateData::Reachable(other)) => this.join(other),
+ }
+ }
+}
+
+/// Partial mapping from [`Place`] to [`PlaceIndex`], where some places also have a [`ValueIndex`].
+///
+/// This data structure essentially maintains a tree of places and their projections. Some
+/// additional bookkeeping is done, to speed up traversal over this tree:
+/// - For iteration, every [`PlaceInfo`] contains an intrusive linked list of its children.
+/// - To directly get the child for a specific projection, there is a `projections` map.
+#[derive(Debug)]
+pub struct Map {
+ locals: IndexVec<Local, Option<PlaceIndex>>,
+ projections: FxHashMap<(PlaceIndex, TrackElem), PlaceIndex>,
+ places: IndexVec<PlaceIndex, PlaceInfo>,
+ value_count: usize,
+}
+
+impl Map {
+ fn new() -> Self {
+ Self {
+ locals: IndexVec::new(),
+ projections: FxHashMap::default(),
+ places: IndexVec::new(),
+ value_count: 0,
+ }
+ }
+
+ /// Returns a map that only tracks places whose type passes the filter.
+ ///
+ /// This is currently the only way to create a [`Map`]. The way in which the tracked places are
+ /// chosen is an implementation detail and may not be relied upon (other than that their type
+ /// passes the filter).
+ #[instrument(skip_all, level = "debug")]
+ pub fn from_filter<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &Body<'tcx>,
+ filter: impl FnMut(Ty<'tcx>) -> bool,
+ ) -> Self {
+ let mut map = Self::new();
+ let exclude = excluded_locals(body);
+ map.register_with_filter(tcx, body, filter, &exclude);
+ debug!("registered {} places ({} nodes in total)", map.value_count, map.places.len());
+ map
+ }
+
+ /// Register all non-excluded places that pass the filter.
+ fn register_with_filter<'tcx>(
+ &mut self,
+ tcx: TyCtxt<'tcx>,
+ body: &Body<'tcx>,
+ mut filter: impl FnMut(Ty<'tcx>) -> bool,
+ exclude: &IndexVec<Local, bool>,
+ ) {
+ // We use this vector as stack, pushing and popping projections.
+ let mut projection = Vec::new();
+ for (local, decl) in body.local_decls.iter_enumerated() {
+ if !exclude[local] {
+ self.register_with_filter_rec(tcx, local, &mut projection, decl.ty, &mut filter);
+ }
+ }
+ }
+
+ /// Potentially register the (local, projection) place and its fields, recursively.
+ ///
+ /// Invariant: The projection must only contain fields.
+ fn register_with_filter_rec<'tcx>(
+ &mut self,
+ tcx: TyCtxt<'tcx>,
+ local: Local,
+ projection: &mut Vec<PlaceElem<'tcx>>,
+ ty: Ty<'tcx>,
+ filter: &mut impl FnMut(Ty<'tcx>) -> bool,
+ ) {
+ // Note: The framework supports only scalars for now.
+ if filter(ty) && ty.is_scalar() {
+ // We know that the projection only contains trackable elements.
+ let place = self.make_place(local, projection).unwrap();
+
+ // Allocate a value slot if it doesn't have one.
+ if self.places[place].value_index.is_none() {
+ self.places[place].value_index = Some(self.value_count.into());
+ self.value_count += 1;
+ }
+ }
+
+ // Recurse with all fields of this place.
+ iter_fields(ty, tcx, |variant, field, ty| {
+ if variant.is_some() {
+ // Downcasts are currently not supported.
+ return;
+ }
+ projection.push(PlaceElem::Field(field, ty));
+ self.register_with_filter_rec(tcx, local, projection, ty, filter);
+ projection.pop();
+ });
+ }
+
+ /// Tries to add the place to the map, without allocating a value slot.
+ ///
+ /// Can fail if the projection contains non-trackable elements.
+ fn make_place<'tcx>(
+ &mut self,
+ local: Local,
+ projection: &[PlaceElem<'tcx>],
+ ) -> Result<PlaceIndex, ()> {
+ // Get the base index of the local.
+ let mut index =
+ *self.locals.get_or_insert_with(local, || self.places.push(PlaceInfo::new(None)));
+
+ // Apply the projection.
+ for &elem in projection {
+ let elem = elem.try_into()?;
+ index = *self.projections.entry((index, elem)).or_insert_with(|| {
+ // Prepend new child to the linked list.
+ let next = self.places.push(PlaceInfo::new(Some(elem)));
+ self.places[next].next_sibling = self.places[index].first_child;
+ self.places[index].first_child = Some(next);
+ next
+ });
+ }
+
+ Ok(index)
+ }
+
+ /// Returns the number of tracked places, i.e., those for which a value can be stored.
+ pub fn tracked_places(&self) -> usize {
+ self.value_count
+ }
+
+ /// Applies a single projection element, yielding the corresponding child.
+ pub fn apply(&self, place: PlaceIndex, elem: TrackElem) -> Option<PlaceIndex> {
+ self.projections.get(&(place, elem)).copied()
+ }
+
+ /// Locates the given place, if it exists in the tree.
+ pub fn find(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
+ let mut index = *self.locals.get(place.local)?.as_ref()?;
+
+ for &elem in place.projection {
+ index = self.apply(index, elem.try_into().ok()?)?;
+ }
+
+ Some(index)
+ }
+
+ /// Iterate over all direct children.
+ pub fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> + '_ {
+ Children::new(self, parent)
+ }
+
+ /// Invoke a function on the given place and all descendants.
+ pub fn preorder_invoke(&self, root: PlaceIndex, f: &mut impl FnMut(PlaceIndex)) {
+ f(root);
+ for child in self.children(root) {
+ self.preorder_invoke(child, f);
+ }
+ }
+}
+
+/// This is the information tracked for every [`PlaceIndex`] and is stored by [`Map`].
+///
+/// Together, `first_child` and `next_sibling` form an intrusive linked list, which is used to
+/// model a tree structure (a replacement for a member like `children: Vec<PlaceIndex>`).
+#[derive(Debug)]
+struct PlaceInfo {
+ /// We store a [`ValueIndex`] if and only if the place is tracked by the analysis.
+ value_index: Option<ValueIndex>,
+
+ /// The projection used to go from parent to this node (only None for root).
+ proj_elem: Option<TrackElem>,
+
+ /// The left-most child.
+ first_child: Option<PlaceIndex>,
+
+ /// Index of the sibling to the right of this node.
+ next_sibling: Option<PlaceIndex>,
+}
+
+impl PlaceInfo {
+ fn new(proj_elem: Option<TrackElem>) -> Self {
+ Self { next_sibling: None, first_child: None, proj_elem, value_index: None }
+ }
+}
+
+struct Children<'a> {
+ map: &'a Map,
+ next: Option<PlaceIndex>,
+}
+
+impl<'a> Children<'a> {
+ fn new(map: &'a Map, parent: PlaceIndex) -> Self {
+ Self { map, next: map.places[parent].first_child }
+ }
+}
+
+impl<'a> Iterator for Children<'a> {
+ type Item = PlaceIndex;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.next {
+ Some(child) => {
+ self.next = self.map.places[child].next_sibling;
+ Some(child)
+ }
+ None => None,
+ }
+ }
+}
+
+/// Used as the result of an operand or r-value.
+pub enum ValueOrPlace<V> {
+ Value(V),
+ Place(PlaceIndex),
+}
+
+impl<V: HasTop> ValueOrPlace<V> {
+ pub fn top() -> Self {
+ ValueOrPlace::Value(V::top())
+ }
+}
+
+/// The set of projection elements that can be used by a tracked place.
+///
+/// Although only field projections are currently allowed, this could change in the future.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum TrackElem {
+ Field(Field),
+}
+
+impl<V, T> TryFrom<ProjectionElem<V, T>> for TrackElem {
+ type Error = ();
+
+ fn try_from(value: ProjectionElem<V, T>) -> Result<Self, Self::Error> {
+ match value {
+ ProjectionElem::Field(field, _) => Ok(TrackElem::Field(field)),
+ _ => Err(()),
+ }
+ }
+}
+
+/// Invokes `f` on all direct fields of `ty`.
+fn iter_fields<'tcx>(
+ ty: Ty<'tcx>,
+ tcx: TyCtxt<'tcx>,
+ mut f: impl FnMut(Option<VariantIdx>, Field, Ty<'tcx>),
+) {
+ match ty.kind() {
+ ty::Tuple(list) => {
+ for (field, ty) in list.iter().enumerate() {
+ f(None, field.into(), ty);
+ }
+ }
+ ty::Adt(def, substs) => {
+ if def.is_union() {
+ return;
+ }
+ for (v_index, v_def) in def.variants().iter_enumerated() {
+ let variant = if def.is_struct() { None } else { Some(v_index) };
+ for (f_index, f_def) in v_def.fields.iter().enumerate() {
+ let field_ty = f_def.ty(tcx, substs);
+ let field_ty = tcx
+ .try_normalize_erasing_regions(ty::ParamEnv::reveal_all(), field_ty)
+ .unwrap_or(field_ty);
+ f(variant, f_index.into(), field_ty);
+ }
+ }
+ }
+ ty::Closure(_, substs) => {
+ iter_fields(substs.as_closure().tupled_upvars_ty(), tcx, f);
+ }
+ _ => (),
+ }
+}
+
+/// Returns all locals with projections that have their reference or address taken.
+fn excluded_locals<'tcx>(body: &Body<'tcx>) -> IndexVec<Local, bool> {
+ struct Collector {
+ result: IndexVec<Local, bool>,
+ }
+
+ impl<'tcx> Visitor<'tcx> for Collector {
+ fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) {
+ if context.is_borrow()
+ || context.is_address_of()
+ || context.is_drop()
+ || context == PlaceContext::MutatingUse(MutatingUseContext::AsmOutput)
+ {
+ // A pointer to a place could be used to access other places with the same local,
+ // hence we have to exclude the local completely.
+ self.result[place.local] = true;
+ }
+ }
+ }
+
+ let mut collector = Collector { result: IndexVec::from_elem(false, &body.local_decls) };
+ collector.visit_body(body);
+ collector.result
+}
+
+/// This is used to visualize the dataflow analysis.
+impl<'tcx, T> DebugWithContext<ValueAnalysisWrapper<T>> for State<T::Value>
+where
+ T: ValueAnalysis<'tcx>,
+ T::Value: Debug,
+{
+ fn fmt_with(&self, ctxt: &ValueAnalysisWrapper<T>, f: &mut Formatter<'_>) -> std::fmt::Result {
+ match &self.0 {
+ StateData::Reachable(values) => debug_with_context(values, None, ctxt.0.map(), f),
+ StateData::Unreachable => write!(f, "unreachable"),
+ }
+ }
+
+ fn fmt_diff_with(
+ &self,
+ old: &Self,
+ ctxt: &ValueAnalysisWrapper<T>,
+ f: &mut Formatter<'_>,
+ ) -> std::fmt::Result {
+ match (&self.0, &old.0) {
+ (StateData::Reachable(this), StateData::Reachable(old)) => {
+ debug_with_context(this, Some(old), ctxt.0.map(), f)
+ }
+ _ => Ok(()), // Consider printing something here.
+ }
+ }
+}
+
+fn debug_with_context_rec<V: Debug + Eq>(
+ place: PlaceIndex,
+ place_str: &str,
+ new: &IndexVec<ValueIndex, V>,
+ old: Option<&IndexVec<ValueIndex, V>>,
+ map: &Map,
+ f: &mut Formatter<'_>,
+) -> std::fmt::Result {
+ if let Some(value) = map.places[place].value_index {
+ match old {
+ None => writeln!(f, "{}: {:?}", place_str, new[value])?,
+ Some(old) => {
+ if new[value] != old[value] {
+ writeln!(f, "\u{001f}-{}: {:?}", place_str, old[value])?;
+ writeln!(f, "\u{001f}+{}: {:?}", place_str, new[value])?;
+ }
+ }
+ }
+ }
+
+ for child in map.children(place) {
+ let info_elem = map.places[child].proj_elem.unwrap();
+ let child_place_str = match info_elem {
+ TrackElem::Field(field) => {
+ if place_str.starts_with("*") {
+ format!("({}).{}", place_str, field.index())
+ } else {
+ format!("{}.{}", place_str, field.index())
+ }
+ }
+ };
+ debug_with_context_rec(child, &child_place_str, new, old, map, f)?;
+ }
+
+ Ok(())
+}
+
+fn debug_with_context<V: Debug + Eq>(
+ new: &IndexVec<ValueIndex, V>,
+ old: Option<&IndexVec<ValueIndex, V>>,
+ map: &Map,
+ f: &mut Formatter<'_>,
+) -> std::fmt::Result {
+ for (local, place) in map.locals.iter_enumerated() {
+ if let Some(place) = place {
+ debug_with_context_rec(*place, &format!("{:?}", local), new, old, map, f)?;
+ }
+ }
+ Ok(())
+}
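
A stripped-down, self-contained model of the `first_child`/`next_sibling` layout used by `Map`, `PlaceInfo`, and `Children` above; plain `usize` indices stand in for `PlaceIndex`, and the names are illustrative rather than the compiler's API:

```rust
// Each node stores only its first child and its right sibling; a parent's
// child list is an intrusive singly linked list threaded through the nodes.
#[derive(Default)]
struct Node {
    first_child: Option<usize>,
    next_sibling: Option<usize>,
}

// Iterate over the direct children of `parent`, like `Map::children`.
fn children(nodes: &[Node], parent: usize) -> impl Iterator<Item = usize> + '_ {
    std::iter::successors(nodes[parent].first_child, move |&c| nodes[c].next_sibling)
}

// Collect `root` and all of its descendants, like `Map::preorder_invoke`.
fn preorder(nodes: &[Node], root: usize, out: &mut Vec<usize>) {
    out.push(root);
    for child in children(nodes, root) {
        preorder(nodes, child, out);
    }
}

fn main() {
    // Give node 0 two children by prepending them, as `Map::make_place` does:
    // the most recently added child becomes `first_child`.
    let mut nodes = vec![Node::default(), Node::default(), Node::default()];
    let prev = nodes[0].first_child;
    nodes[2].next_sibling = prev; // None
    nodes[0].first_child = Some(2);
    let prev = nodes[0].first_child;
    nodes[1].next_sibling = prev; // Some(2)
    nodes[0].first_child = Some(1);

    let mut order = Vec::new();
    preorder(&nodes, 0, &mut order);
    assert_eq!(order, vec![0, 1, 2]);
}
```
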
diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
new file mode 100644
index 0000000000000..e9027387413cf
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
@@ -0,0 +1,530 @@
+//! A constant propagation optimization pass based on dataflow analysis.
+//!
+//! Currently, this pass only propagates scalar values.
+
+use rustc_const_eval::interpret::{ConstValue, ImmTy, Immediate, InterpCx, Scalar};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_middle::mir::visit::{MutVisitor, Visitor};
+use rustc_middle::mir::*;
+use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_mir_dataflow::value_analysis::{Map, State, TrackElem, ValueAnalysis, ValueOrPlace};
+use rustc_mir_dataflow::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects};
+use rustc_span::DUMMY_SP;
+
+use crate::MirPass;
+
+// These constants are somewhat random guesses and have not been optimized.
+// If `tcx.sess.mir_opt_level() >= 4`, we ignore the limits (this can become very expensive).
+const BLOCK_LIMIT: usize = 100;
+const PLACE_LIMIT: usize = 100;
+
+pub struct DataflowConstProp;
+
+impl<'tcx> MirPass<'tcx> for DataflowConstProp {
+ fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
+ sess.mir_opt_level() >= 3
+ }
+
+ #[instrument(skip_all level = "debug")]
+ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+ if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
+ debug!("aborted dataflow const prop due too many basic blocks");
+ return;
+ }
+
+ // Decide which places to track during the analysis.
+ let map = Map::from_filter(tcx, body, Ty::is_scalar);
+
+ // We want to have a somewhat linear runtime w.r.t. the number of statements/terminators.
+ // Let's call this number `n`. Dataflow analysis has `O(h*n)` transfer function
+ // applications, where `h` is the height of the lattice. Because the height of our lattice
+ // is linear w.r.t. the number of tracked places, this is `O(tracked_places * n)`. However,
+ // because every transfer function application could traverse the whole map, this becomes
+ // `O(num_nodes * tracked_places * n)` in terms of time complexity. Since the number of
+ // map nodes is strongly correlated to the number of tracked places, this becomes more or
+ // less `O(n)` if we place a constant limit on the number of tracked places.
+ if tcx.sess.mir_opt_level() < 4 && map.tracked_places() > PLACE_LIMIT {
+ debug!("aborted dataflow const prop due to too many tracked places");
+ return;
+ }
+
+ // Perform the actual dataflow analysis.
+ let analysis = ConstAnalysis::new(tcx, body, map);
+ let results = debug_span!("analyze")
+ .in_scope(|| analysis.wrap().into_engine(tcx, body).iterate_to_fixpoint());
+
+ // Collect results and patch the body afterwards.
+ let mut visitor = CollectAndPatch::new(tcx, &results.analysis.0.map);
+ debug_span!("collect").in_scope(|| results.visit_reachable_with(body, &mut visitor));
+ debug_span!("patch").in_scope(|| visitor.visit_body(body));
+ }
+}
+
+struct ConstAnalysis<'tcx> {
+ map: Map,
+ tcx: TyCtxt<'tcx>,
+ ecx: InterpCx<'tcx, 'tcx, DummyMachine>,
+ param_env: ty::ParamEnv<'tcx>,
+}
+
+impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> {
+ type Value = FlatSet<ScalarTy<'tcx>>;
+
+ const NAME: &'static str = "ConstAnalysis";
+
+ fn map(&self) -> &Map {
+ &self.map
+ }
+
+ fn handle_assign(
+ &self,
+ target: Place<'tcx>,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ match rvalue {
+ Rvalue::CheckedBinaryOp(op, box (left, right)) => {
+ let target = self.map().find(target.as_ref());
+ if let Some(target) = target {
+ // We should not track any projections other than
+ // what is overwritten below, but just in case...
+ state.flood_idx(target, self.map());
+ }
+
+ let value_target = target
+ .and_then(|target| self.map().apply(target, TrackElem::Field(0_u32.into())));
+ let overflow_target = target
+ .and_then(|target| self.map().apply(target, TrackElem::Field(1_u32.into())));
+
+ if value_target.is_some() || overflow_target.is_some() {
+ let (val, overflow) = self.binary_op(state, *op, left, right);
+
+ if let Some(value_target) = value_target {
+ state.assign_idx(value_target, ValueOrPlace::Value(val), self.map());
+ }
+ if let Some(overflow_target) = overflow_target {
+ let overflow = match overflow {
+ FlatSet::Top => FlatSet::Top,
+ FlatSet::Elem(overflow) => {
+ if overflow {
+ // Overflow cannot be reliably propagated. See: https://github.com/rust-lang/rust/pull/101168#issuecomment-1288091446
+ FlatSet::Top
+ } else {
+ self.wrap_scalar(Scalar::from_bool(false), self.tcx.types.bool)
+ }
+ }
+ FlatSet::Bottom => FlatSet::Bottom,
+ };
+ state.assign_idx(
+ overflow_target,
+ ValueOrPlace::Value(overflow),
+ self.map(),
+ );
+ }
+ }
+ }
+ _ => self.super_assign(target, rvalue, state),
+ }
+ }
+
+ fn handle_rvalue(
+ &self,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> ValueOrPlace<Self::Value> {
+ match rvalue {
+ Rvalue::Cast(
+ kind @ (CastKind::IntToInt
+ | CastKind::FloatToInt
+ | CastKind::FloatToFloat
+ | CastKind::IntToFloat),
+ operand,
+ ty,
+ ) => match self.eval_operand(operand, state) {
+ FlatSet::Elem(op) => match kind {
+ CastKind::IntToInt | CastKind::IntToFloat => {
+ self.ecx.int_to_int_or_float(&op, *ty)
+ }
+ CastKind::FloatToInt | CastKind::FloatToFloat => {
+ self.ecx.float_to_float_or_int(&op, *ty)
+ }
+ _ => unreachable!(),
+ }
+ .map(|result| ValueOrPlace::Value(self.wrap_immediate(result, *ty)))
+ .unwrap_or(ValueOrPlace::top()),
+ _ => ValueOrPlace::top(),
+ },
+ Rvalue::BinaryOp(op, box (left, right)) => {
+ // Overflows must be ignored here.
+ let (val, _overflow) = self.binary_op(state, *op, left, right);
+ ValueOrPlace::Value(val)
+ }
+ Rvalue::UnaryOp(op, operand) => match self.eval_operand(operand, state) {
+ FlatSet::Elem(value) => self
+ .ecx
+ .unary_op(*op, &value)
+ .map(|val| ValueOrPlace::Value(self.wrap_immty(val)))
+ .unwrap_or(ValueOrPlace::Value(FlatSet::Top)),
+ FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom),
+ FlatSet::Top => ValueOrPlace::Value(FlatSet::Top),
+ },
+ _ => self.super_rvalue(rvalue, state),
+ }
+ }
+
+ fn handle_constant(
+ &self,
+ constant: &Constant<'tcx>,
+ _state: &mut State<Self::Value>,
+ ) -> Self::Value {
+ constant
+ .literal
+ .eval(self.tcx, self.param_env)
+ .try_to_scalar()
+ .map(|value| FlatSet::Elem(ScalarTy(value, constant.ty())))
+ .unwrap_or(FlatSet::Top)
+ }
+
+ fn handle_switch_int(
+ &self,
+ discr: &Operand<'tcx>,
+ apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
+ ) {
+ // FIXME: The dataflow framework only provides the state if we call `apply()`, which makes
+ // this more inefficient than it has to be.
+ let mut discr_value = None;
+ let mut handled = false;
+ apply_edge_effects.apply(|state, target| {
+ let discr_value = match discr_value {
+ Some(value) => value,
+ None => {
+ let value = match self.handle_operand(discr, state) {
+ ValueOrPlace::Value(value) => value,
+ ValueOrPlace::Place(place) => state.get_idx(place, self.map()),
+ };
+ let result = match value {
+ FlatSet::Top => FlatSet::Top,
+ FlatSet::Elem(ScalarTy(scalar, _)) => {
+ let int = scalar.assert_int();
+ FlatSet::Elem(int.assert_bits(int.size()))
+ }
+ FlatSet::Bottom => FlatSet::Bottom,
+ };
+ discr_value = Some(result);
+ result
+ }
+ };
+
+ let FlatSet::Elem(choice) = discr_value else {
+ // Do nothing if we don't know which branch will be taken.
+ return
+ };
+
+ if target.value.map(|n| n == choice).unwrap_or(!handled) {
+ // Branch is taken. Has no effect on state.
+ handled = true;
+ } else {
+ // Branch is not taken.
+ state.mark_unreachable();
+ }
+ })
+ }
+}
+
+#[derive(Clone, PartialEq, Eq)]
+struct ScalarTy<'tcx>(Scalar, Ty<'tcx>);
+
+impl<'tcx> std::fmt::Debug for ScalarTy<'tcx> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ // This is used for dataflow visualization, so we return something more concise.
+ std::fmt::Display::fmt(&ConstantKind::Val(ConstValue::Scalar(self.0), self.1), f)
+ }
+}
+
+impl<'tcx> ConstAnalysis<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, map: Map) -> Self {
+ let param_env = tcx.param_env(body.source.def_id());
+ Self {
+ map,
+ tcx,
+ ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
+ param_env: param_env,
+ }
+ }
+
+ fn binary_op(
+ &self,
+ state: &mut State<FlatSet<ScalarTy<'tcx>>>,
+ op: BinOp,
+ left: &Operand<'tcx>,
+ right: &Operand<'tcx>,
+ ) -> (FlatSet<ScalarTy<'tcx>>, FlatSet<bool>) {
+ let left = self.eval_operand(left, state);
+ let right = self.eval_operand(right, state);
+ match (left, right) {
+ (FlatSet::Elem(left), FlatSet::Elem(right)) => {
+ match self.ecx.overflowing_binary_op(op, &left, &right) {
+ Ok((val, overflow, ty)) => (self.wrap_scalar(val, ty), FlatSet::Elem(overflow)),
+ _ => (FlatSet::Top, FlatSet::Top),
+ }
+ }
+ (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
+ (_, _) => {
+ // Could attempt some algebraic simplifications here.
+ (FlatSet::Top, FlatSet::Top)
+ }
+ }
+ }
+
+ fn eval_operand(
+ &self,
+ op: &Operand<'tcx>,
+ state: &mut State<FlatSet<ScalarTy<'tcx>>>,
+ ) -> FlatSet<ImmTy<'tcx>> {
+ let value = match self.handle_operand(op, state) {
+ ValueOrPlace::Value(value) => value,
+ ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
+ };
+ match value {
+ FlatSet::Top => FlatSet::Top,
+ FlatSet::Elem(ScalarTy(scalar, ty)) => self
+ .tcx
+ .layout_of(self.param_env.and(ty))
+ .map(|layout| FlatSet::Elem(ImmTy::from_scalar(scalar, layout)))
+ .unwrap_or(FlatSet::Top),
+ FlatSet::Bottom => FlatSet::Bottom,
+ }
+ }
+
+ fn wrap_scalar(&self, scalar: Scalar, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
+ FlatSet::Elem(ScalarTy(scalar, ty))
+ }
+
+ fn wrap_immediate(&self, imm: Immediate, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
+ match imm {
+ Immediate::Scalar(scalar) => self.wrap_scalar(scalar, ty),
+ _ => FlatSet::Top,
+ }
+ }
+
+ fn wrap_immty(&self, val: ImmTy<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
+ self.wrap_immediate(*val, val.layout.ty)
+ }
+}
+
+struct CollectAndPatch<'tcx, 'map> {
+ tcx: TyCtxt<'tcx>,
+ map: &'map Map,
+
+ /// For a given MIR location, this stores the values of the operands used by that location. In
+ /// particular, this is before the effect, such that the operands of `_1 = _1 + _2` are
+ /// properly captured. (This may become UB soon, but it is currently emitted even by safe code.)
+ before_effect: FxHashMap<(Location, Place<'tcx>), ScalarTy<'tcx>>,
+
+ /// Stores the assigned values for assignments where the Rvalue is constant.
+ assignments: FxHashMap<Location, ScalarTy<'tcx>>,
+}
+
+impl<'tcx, 'map> CollectAndPatch<'tcx, 'map> {
+ fn new(tcx: TyCtxt<'tcx>, map: &'map Map) -> Self {
+ Self { tcx, map, before_effect: FxHashMap::default(), assignments: FxHashMap::default() }
+ }
+
+ fn make_operand(&self, scalar: ScalarTy<'tcx>) -> Operand<'tcx> {
+ Operand::Constant(Box::new(Constant {
+ span: DUMMY_SP,
+ user_ty: None,
+ literal: ConstantKind::Val(ConstValue::Scalar(scalar.0), scalar.1),
+ }))
+ }
+}
+
+impl<'mir, 'tcx, 'map> ResultsVisitor<'mir, 'tcx> for CollectAndPatch<'tcx, 'map> {
+ type FlowState = State<FlatSet<ScalarTy<'tcx>>>;
+
+ fn visit_statement_before_primary_effect(
+ &mut self,
+ state: &Self::FlowState,
+ statement: &'mir Statement<'tcx>,
+ location: Location,
+ ) {
+ match &statement.kind {
+ StatementKind::Assign(box (_, rvalue)) => {
+ OperandCollector { state, visitor: self }.visit_rvalue(rvalue, location);
+ }
+ _ => (),
+ }
+ }
+
+ fn visit_statement_after_primary_effect(
+ &mut self,
+ state: &Self::FlowState,
+ statement: &'mir Statement<'tcx>,
+ location: Location,
+ ) {
+ match statement.kind {
+ StatementKind::Assign(box (_, Rvalue::Use(Operand::Constant(_)))) => {
+ // Don't overwrite the assignment if it already uses a constant (to keep the span).
+ }
+ StatementKind::Assign(box (place, _)) => match state.get(place.as_ref(), self.map) {
+ FlatSet::Top => (),
+ FlatSet::Elem(value) => {
+ self.assignments.insert(location, value);
+ }
+ FlatSet::Bottom => {
+ // This assignment is either unreachable, or an uninitialized value is assigned.
+ }
+ },
+ _ => (),
+ }
+ }
+
+ fn visit_terminator_before_primary_effect(
+ &mut self,
+ state: &Self::FlowState,
+ terminator: &'mir Terminator<'tcx>,
+ location: Location,
+ ) {
+ OperandCollector { state, visitor: self }.visit_terminator(terminator, location);
+ }
+}
+
+impl<'tcx, 'map> MutVisitor<'tcx> for CollectAndPatch<'tcx, 'map> {
+ fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
+
+ fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
+ if let Some(value) = self.assignments.get(&location) {
+ match &mut statement.kind {
+ StatementKind::Assign(box (_, rvalue)) => {
+ *rvalue = Rvalue::Use(self.make_operand(value.clone()));
+ }
+ _ => bug!("found assignment info for non-assign statement"),
+ }
+ } else {
+ self.super_statement(statement, location);
+ }
+ }
+
+ fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
+ match operand {
+ Operand::Copy(place) | Operand::Move(place) => {
+ if let Some(value) = self.before_effect.get(&(location, *place)) {
+ *operand = self.make_operand(value.clone());
+ }
+ }
+ _ => (),
+ }
+ }
+}
+
+struct OperandCollector<'tcx, 'map, 'a> {
+ state: &'a State<FlatSet<ScalarTy<'tcx>>>,
+ visitor: &'a mut CollectAndPatch<'tcx, 'map>,
+}
+
+impl<'tcx, 'map, 'a> Visitor<'tcx> for OperandCollector<'tcx, 'map, 'a> {
+ fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
+ match operand {
+ Operand::Copy(place) | Operand::Move(place) => {
+ match self.state.get(place.as_ref(), self.visitor.map) {
+ FlatSet::Top => (),
+ FlatSet::Elem(value) => {
+ self.visitor.before_effect.insert((location, *place), value);
+ }
+ FlatSet::Bottom => (),
+ }
+ }
+ _ => (),
+ }
+ }
+}
+
+struct DummyMachine;
+
+impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachine {
+ rustc_const_eval::interpret::compile_time_machine!(<'mir, 'tcx>);
+ type MemoryKind = !;
+ const PANIC_ON_ALLOC_FAIL: bool = true;
+
+ fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
+ unimplemented!()
+ }
+
+ fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
+ unimplemented!()
+ }
+
+ fn find_mir_or_eval_fn(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _instance: ty::Instance<'tcx>,
+ _abi: rustc_target::spec::abi::Abi,
+ _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
+ _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
+ _target: Option<BasicBlock>,
+ _unwind: rustc_const_eval::interpret::StackPopUnwind,
+ ) -> interpret::InterpResult<'tcx, Option<(&'mir Body<'tcx>, ty::Instance<'tcx>)>> {
+ unimplemented!()
+ }
+
+ fn call_intrinsic(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _instance: ty::Instance<'tcx>,
+ _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
+ _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
+ _target: Option<BasicBlock>,
+ _unwind: rustc_const_eval::interpret::StackPopUnwind,
+ ) -> interpret::InterpResult<'tcx> {
+ unimplemented!()
+ }
+
+ fn assert_panic(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _msg: &rustc_middle::mir::AssertMessage<'tcx>,
+ _unwind: Option<BasicBlock>,
+ ) -> interpret::InterpResult<'tcx> {
+ unimplemented!()
+ }
+
+ fn binary_ptr_op(
+ _ecx: &InterpCx<'mir, 'tcx, Self>,
+ _bin_op: BinOp,
+ _left: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
+ _right: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
+ ) -> interpret::InterpResult<'tcx, (interpret::Scalar<Self::Provenance>, bool, Ty<'tcx>)> {
+ throw_unsup!(Unsupported("".into()))
+ }
+
+ fn expose_ptr(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _ptr: interpret::Pointer,
+ ) -> interpret::InterpResult<'tcx> {
+ unimplemented!()
+ }
+
+ fn init_frame_extra(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _frame: rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance>,
+ ) -> interpret::InterpResult<
+ 'tcx,
+ rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
+ > {
+ unimplemented!()
+ }
+
+ fn stack<'a>(
+ _ecx: &'a InterpCx<'mir, 'tcx, Self>,
+ ) -> &'a [rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>]
+ {
+ unimplemented!()
+ }
+
+ fn stack_mut<'a>(
+ _ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
+ ) -> &'a mut Vec<
+ rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
+ > {
+ unimplemented!()
+ }
+}
diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs
index 4791be1306c1b..692eeddfb9857 100644
--- a/compiler/rustc_mir_transform/src/lib.rs
+++ b/compiler/rustc_mir_transform/src/lib.rs
@@ -54,6 +54,7 @@ mod const_goto;
mod const_prop;
mod const_prop_lint;
mod coverage;
+mod dataflow_const_prop;
mod dead_store_elimination;
mod deaggregator;
mod deduce_param_attrs;
@@ -569,6 +570,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
//
// FIXME(#70073): This pass is responsible for both optimization as well as some lints.
&const_prop::ConstProp,
+ &dataflow_const_prop::DataflowConstProp,
//
// Const-prop runs unconditionally, but doesn't mutate the MIR at mir-opt-level=0.
&const_debuginfo::ConstDebugInfo,
diff --git a/src/test/mir-opt/coverage_graphviz.bar.InstrumentCoverage.0.dot b/src/test/mir-opt/coverage_graphviz.bar.InstrumentCoverage.0.dot
index c00eae96e0838..03df5c9504be2 100644
--- a/src/test/mir-opt/coverage_graphviz.bar.InstrumentCoverage.0.dot
+++ b/src/test/mir-opt/coverage_graphviz.bar.InstrumentCoverage.0.dot
@@ -2,5 +2,5 @@ digraph Cov_0_4 {
graph [fontname="Courier, monospace"];
node [fontname="Courier, monospace"];
edge [fontname="Courier, monospace"];
- bcb0__Cov_0_4 [shape="none", label=<
bcb0 |
|
Counter(bcb0) at 18:1-20:2 19:5-19:9: @0[0]: Coverage::Counter(1) for $DIR/coverage_graphviz.rs:18:1 - 20:2 20:2-20:2: @0.Return: return |
bb0: Return |
>];
+ bcb0__Cov_0_4 [shape="none", label=<bcb0 |
|
Counter(bcb0) at 18:1-20:2 19:5-19:9: @0[0]: Coverage::Counter(1) for $DIR/coverage_graphviz.rs:18:1 - 20:2 20:2-20:2: @0.Return: return |
bb0: Return |
>];
}
diff --git a/src/test/mir-opt/coverage_graphviz.main.InstrumentCoverage.0.dot b/src/test/mir-opt/coverage_graphviz.main.InstrumentCoverage.0.dot
index ca0eb7e845aad..fd21b14af25f3 100644
--- a/src/test/mir-opt/coverage_graphviz.main.InstrumentCoverage.0.dot
+++ b/src/test/mir-opt/coverage_graphviz.main.InstrumentCoverage.0.dot
@@ -2,10 +2,10 @@ digraph Cov_0_3 {
graph [fontname="Courier, monospace"];
node [fontname="Courier, monospace"];
edge [fontname="Courier, monospace"];
- bcb3__Cov_0_3 [shape="none", label=<bcb3 |
Counter(bcb3) at 13:10-13:10 13:10-13:10: @5[0]: Coverage::Counter(2) for $DIR/coverage_graphviz.rs:13:10 - 13:11 |
bb5: Goto |
>];
- bcb2__Cov_0_3 [shape="none", label=<bcb2 |
Expression(bcb1:(bcb0 + bcb3) - bcb3) at 12:13-12:18 12:13-12:18: @4[0]: Coverage::Expression(4294967293) = 4294967294 + 0 for $DIR/coverage_graphviz.rs:15:1 - 15:2 Expression(bcb2:(bcb1:(bcb0 + bcb3) - bcb3) + 0) at 15:2-15:2 15:2-15:2: @4.Return: return |
bb4: Return |
>];
- bcb1__Cov_0_3 [shape="none", label=<bcb1 |
Expression(bcb0 + bcb3) at 10:5-11:17 11:12-11:17: @2.Call: _2 = bar() -> [return: bb3, unwind: bb6] |
bb1: FalseUnwind bb2: Call |
bb3: SwitchInt |
>];
- bcb0__Cov_0_3 [shape="none", label=<bcb0 |
|
Counter(bcb0) at 9:1-9:11 |
bb0: Goto |
>];
+ bcb3__Cov_0_3 [shape="none", label=<bcb3 |
Counter(bcb3) at 13:10-13:10 13:10-13:10: @5[0]: Coverage::Counter(2) for $DIR/coverage_graphviz.rs:13:10 - 13:11 |
bb5: Goto |
>];
+ bcb2__Cov_0_3 [shape="none", label=<bcb2 |
Expression(bcb1:(bcb0 + bcb3) - bcb3) at 12:13-12:18 12:13-12:18: @4[0]: Coverage::Expression(4294967293) = 4294967294 + 0 for $DIR/coverage_graphviz.rs:15:1 - 15:2 Expression(bcb2:(bcb1:(bcb0 + bcb3) - bcb3) + 0) at 15:2-15:2 15:2-15:2: @4.Return: return |
bb4: Return |
>];
+ bcb1__Cov_0_3 [shape="none", label=<bcb1 |
Expression(bcb0 + bcb3) at 10:5-11:17 11:12-11:17: @2.Call: _2 = bar() -> [return: bb3, unwind: bb6] |
bb1: FalseUnwind bb2: Call |
bb3: SwitchInt |
>];
+ bcb0__Cov_0_3 [shape="none", label=<bcb0 |
|
Counter(bcb0) at 9:1-9:11 |
bb0: Goto |
>];
bcb3__Cov_0_3 -> bcb1__Cov_0_3 [label=<>];
bcb1__Cov_0_3 -> bcb3__Cov_0_3 [label=];
bcb1__Cov_0_3 -> bcb2__Cov_0_3 [label=];
diff --git a/src/test/mir-opt/dataflow-const-prop/cast.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/cast.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..bf9ab8669380a
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/cast.main.DataflowConstProp.diff
@@ -0,0 +1,37 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/cast.rs:+0:11: +0:11
+ let _1: i32; // in scope 0 at $DIR/cast.rs:+1:9: +1:10
+ let mut _3: u8; // in scope 0 at $DIR/cast.rs:+2:13: +2:20
+ let mut _4: i32; // in scope 0 at $DIR/cast.rs:+2:13: +2:14
+ scope 1 {
+ debug a => _1; // in scope 1 at $DIR/cast.rs:+1:9: +1:10
+ let _2: u8; // in scope 1 at $DIR/cast.rs:+2:9: +2:10
+ scope 2 {
+ debug b => _2; // in scope 2 at $DIR/cast.rs:+2:9: +2:10
+ }
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/cast.rs:+1:9: +1:10
+ _1 = const 257_i32; // scope 0 at $DIR/cast.rs:+1:13: +1:16
+ StorageLive(_2); // scope 1 at $DIR/cast.rs:+2:9: +2:10
+ StorageLive(_3); // scope 1 at $DIR/cast.rs:+2:13: +2:20
+ StorageLive(_4); // scope 1 at $DIR/cast.rs:+2:13: +2:14
+- _4 = _1; // scope 1 at $DIR/cast.rs:+2:13: +2:14
+- _3 = move _4 as u8 (IntToInt); // scope 1 at $DIR/cast.rs:+2:13: +2:20
++ _4 = const 257_i32; // scope 1 at $DIR/cast.rs:+2:13: +2:14
++ _3 = const 1_u8; // scope 1 at $DIR/cast.rs:+2:13: +2:20
+ StorageDead(_4); // scope 1 at $DIR/cast.rs:+2:19: +2:20
+- _2 = Add(move _3, const 1_u8); // scope 1 at $DIR/cast.rs:+2:13: +2:24
++ _2 = const 2_u8; // scope 1 at $DIR/cast.rs:+2:13: +2:24
+ StorageDead(_3); // scope 1 at $DIR/cast.rs:+2:23: +2:24
+ _0 = const (); // scope 0 at $DIR/cast.rs:+0:11: +3:2
+ StorageDead(_2); // scope 1 at $DIR/cast.rs:+3:1: +3:2
+ StorageDead(_1); // scope 0 at $DIR/cast.rs:+3:1: +3:2
+ return; // scope 0 at $DIR/cast.rs:+3:2: +3:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/cast.rs b/src/test/mir-opt/dataflow-const-prop/cast.rs
new file mode 100644
index 0000000000000..484403f7f0ec4
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/cast.rs
@@ -0,0 +1,7 @@
+// unit-test: DataflowConstProp
+
+// EMIT_MIR cast.main.DataflowConstProp.diff
+fn main() {
+ let a = 257;
+ let b = a as u8 + 1;
+}
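
The constants in the expected `cast` output follow from wrapping `as` conversion; a standalone check (not part of the test suite):

```rust
fn main() {
    let a: i32 = 257;
    assert_eq!(a as u8, 1);     // 257 % 256 == 1, matching `_3 = const 1_u8`
    assert_eq!(a as u8 + 1, 2); // matching `_2 = const 2_u8`
}
```
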
diff --git a/src/test/mir-opt/dataflow-const-prop/checked.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/checked.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..a4ebd0c8c18f0
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/checked.main.DataflowConstProp.diff
@@ -0,0 +1,80 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/checked.rs:+0:11: +0:11
+ let _1: i32; // in scope 0 at $DIR/checked.rs:+1:9: +1:10
+ let mut _4: i32; // in scope 0 at $DIR/checked.rs:+3:13: +3:14
+ let mut _5: i32; // in scope 0 at $DIR/checked.rs:+3:17: +3:18
+ let mut _6: (i32, bool); // in scope 0 at $DIR/checked.rs:+3:13: +3:18
+ let mut _9: i32; // in scope 0 at $DIR/checked.rs:+6:13: +6:14
+ let mut _10: (i32, bool); // in scope 0 at $DIR/checked.rs:+6:13: +6:18
+ scope 1 {
+ debug a => _1; // in scope 1 at $DIR/checked.rs:+1:9: +1:10
+ let _2: i32; // in scope 1 at $DIR/checked.rs:+2:9: +2:10
+ scope 2 {
+ debug b => _2; // in scope 2 at $DIR/checked.rs:+2:9: +2:10
+ let _3: i32; // in scope 2 at $DIR/checked.rs:+3:9: +3:10
+ scope 3 {
+ debug c => _3; // in scope 3 at $DIR/checked.rs:+3:9: +3:10
+ let _7: i32; // in scope 3 at $DIR/checked.rs:+5:9: +5:10
+ scope 4 {
+ debug d => _7; // in scope 4 at $DIR/checked.rs:+5:9: +5:10
+ let _8: i32; // in scope 4 at $DIR/checked.rs:+6:9: +6:10
+ scope 5 {
+ debug e => _8; // in scope 5 at $DIR/checked.rs:+6:9: +6:10
+ }
+ }
+ }
+ }
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/checked.rs:+1:9: +1:10
+ _1 = const 1_i32; // scope 0 at $DIR/checked.rs:+1:13: +1:14
+ StorageLive(_2); // scope 1 at $DIR/checked.rs:+2:9: +2:10
+ _2 = const 2_i32; // scope 1 at $DIR/checked.rs:+2:13: +2:14
+ StorageLive(_3); // scope 2 at $DIR/checked.rs:+3:9: +3:10
+ StorageLive(_4); // scope 2 at $DIR/checked.rs:+3:13: +3:14
+- _4 = _1; // scope 2 at $DIR/checked.rs:+3:13: +3:14
++ _4 = const 1_i32; // scope 2 at $DIR/checked.rs:+3:13: +3:14
+ StorageLive(_5); // scope 2 at $DIR/checked.rs:+3:17: +3:18
+- _5 = _2; // scope 2 at $DIR/checked.rs:+3:17: +3:18
+- _6 = CheckedAdd(_4, _5); // scope 2 at $DIR/checked.rs:+3:13: +3:18
+- assert(!move (_6.1: bool), "attempt to compute `{} + {}`, which would overflow", move _4, move _5) -> bb1; // scope 2 at $DIR/checked.rs:+3:13: +3:18
++ _5 = const 2_i32; // scope 2 at $DIR/checked.rs:+3:17: +3:18
++ _6 = CheckedAdd(const 1_i32, const 2_i32); // scope 2 at $DIR/checked.rs:+3:13: +3:18
++ assert(!const false, "attempt to compute `{} + {}`, which would overflow", const 1_i32, const 2_i32) -> bb1; // scope 2 at $DIR/checked.rs:+3:13: +3:18
+ }
+
+ bb1: {
+- _3 = move (_6.0: i32); // scope 2 at $DIR/checked.rs:+3:13: +3:18
++ _3 = const 3_i32; // scope 2 at $DIR/checked.rs:+3:13: +3:18
+ StorageDead(_5); // scope 2 at $DIR/checked.rs:+3:17: +3:18
+ StorageDead(_4); // scope 2 at $DIR/checked.rs:+3:17: +3:18
+ StorageLive(_7); // scope 3 at $DIR/checked.rs:+5:9: +5:10
+ _7 = const _; // scope 3 at $DIR/checked.rs:+5:13: +5:21
+ StorageLive(_8); // scope 4 at $DIR/checked.rs:+6:9: +6:10
+ StorageLive(_9); // scope 4 at $DIR/checked.rs:+6:13: +6:14
+- _9 = _7; // scope 4 at $DIR/checked.rs:+6:13: +6:14
+- _10 = CheckedAdd(_9, const 1_i32); // scope 4 at $DIR/checked.rs:+6:13: +6:18
+- assert(!move (_10.1: bool), "attempt to compute `{} + {}`, which would overflow", move _9, const 1_i32) -> bb2; // scope 4 at $DIR/checked.rs:+6:13: +6:18
++ _9 = const i32::MAX; // scope 4 at $DIR/checked.rs:+6:13: +6:14
++ _10 = CheckedAdd(const i32::MAX, const 1_i32); // scope 4 at $DIR/checked.rs:+6:13: +6:18
++ assert(!move (_10.1: bool), "attempt to compute `{} + {}`, which would overflow", const i32::MAX, const 1_i32) -> bb2; // scope 4 at $DIR/checked.rs:+6:13: +6:18
+ }
+
+ bb2: {
+- _8 = move (_10.0: i32); // scope 4 at $DIR/checked.rs:+6:13: +6:18
++ _8 = const i32::MIN; // scope 4 at $DIR/checked.rs:+6:13: +6:18
+ StorageDead(_9); // scope 4 at $DIR/checked.rs:+6:17: +6:18
+ _0 = const (); // scope 0 at $DIR/checked.rs:+0:11: +7:2
+ StorageDead(_8); // scope 4 at $DIR/checked.rs:+7:1: +7:2
+ StorageDead(_7); // scope 3 at $DIR/checked.rs:+7:1: +7:2
+ StorageDead(_3); // scope 2 at $DIR/checked.rs:+7:1: +7:2
+ StorageDead(_2); // scope 1 at $DIR/checked.rs:+7:1: +7:2
+ StorageDead(_1); // scope 0 at $DIR/checked.rs:+7:1: +7:2
+ return; // scope 0 at $DIR/checked.rs:+7:2: +7:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/checked.rs b/src/test/mir-opt/dataflow-const-prop/checked.rs
new file mode 100644
index 0000000000000..0738a4ee53b86
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/checked.rs
@@ -0,0 +1,13 @@
+// unit-test: DataflowConstProp
+// compile-flags: -Coverflow-checks=on
+
+// EMIT_MIR checked.main.DataflowConstProp.diff
+#[allow(arithmetic_overflow)]
+fn main() {
+ let a = 1;
+ let b = 2;
+ let c = a + b;
+
+ let d = i32::MAX;
+ let e = d + 1;
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/enum.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/enum.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..2ced794e628f0
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/enum.main.DataflowConstProp.diff
@@ -0,0 +1,61 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/enum.rs:+0:11: +0:11
+ let _1: E; // in scope 0 at $DIR/enum.rs:+1:9: +1:10
+ let mut _3: isize; // in scope 0 at $DIR/enum.rs:+2:23: +2:31
+ scope 1 {
+ debug e => _1; // in scope 1 at $DIR/enum.rs:+1:9: +1:10
+ let _2: i32; // in scope 1 at $DIR/enum.rs:+2:9: +2:10
+ let _4: i32; // in scope 1 at $DIR/enum.rs:+2:29: +2:30
+ let _5: i32; // in scope 1 at $DIR/enum.rs:+2:44: +2:45
+ scope 2 {
+ debug x => _2; // in scope 2 at $DIR/enum.rs:+2:9: +2:10
+ }
+ scope 3 {
+ debug x => _4; // in scope 3 at $DIR/enum.rs:+2:29: +2:30
+ }
+ scope 4 {
+ debug x => _5; // in scope 4 at $DIR/enum.rs:+2:44: +2:45
+ }
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/enum.rs:+1:9: +1:10
+ Deinit(_1); // scope 0 at $DIR/enum.rs:+1:13: +1:21
+ ((_1 as V1).0: i32) = const 0_i32; // scope 0 at $DIR/enum.rs:+1:13: +1:21
+ discriminant(_1) = 0; // scope 0 at $DIR/enum.rs:+1:13: +1:21
+ StorageLive(_2); // scope 1 at $DIR/enum.rs:+2:9: +2:10
+ _3 = discriminant(_1); // scope 1 at $DIR/enum.rs:+2:19: +2:20
+ switchInt(move _3) -> [0_isize: bb3, 1_isize: bb1, otherwise: bb2]; // scope 1 at $DIR/enum.rs:+2:13: +2:20
+ }
+
+ bb1: {
+ StorageLive(_5); // scope 1 at $DIR/enum.rs:+2:44: +2:45
+ _5 = ((_1 as V2).0: i32); // scope 1 at $DIR/enum.rs:+2:44: +2:45
+ _2 = _5; // scope 4 at $DIR/enum.rs:+2:50: +2:51
+ StorageDead(_5); // scope 1 at $DIR/enum.rs:+2:50: +2:51
+ goto -> bb4; // scope 1 at $DIR/enum.rs:+2:50: +2:51
+ }
+
+ bb2: {
+ unreachable; // scope 1 at $DIR/enum.rs:+2:19: +2:20
+ }
+
+ bb3: {
+ StorageLive(_4); // scope 1 at $DIR/enum.rs:+2:29: +2:30
+ _4 = ((_1 as V1).0: i32); // scope 1 at $DIR/enum.rs:+2:29: +2:30
+ _2 = _4; // scope 3 at $DIR/enum.rs:+2:35: +2:36
+ StorageDead(_4); // scope 1 at $DIR/enum.rs:+2:35: +2:36
+ goto -> bb4; // scope 1 at $DIR/enum.rs:+2:35: +2:36
+ }
+
+ bb4: {
+ _0 = const (); // scope 0 at $DIR/enum.rs:+0:11: +3:2
+ StorageDead(_2); // scope 1 at $DIR/enum.rs:+3:1: +3:2
+ StorageDead(_1); // scope 0 at $DIR/enum.rs:+3:1: +3:2
+ return; // scope 0 at $DIR/enum.rs:+3:2: +3:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/enum.rs b/src/test/mir-opt/dataflow-const-prop/enum.rs
new file mode 100644
index 0000000000000..13288577dea3f
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/enum.rs
@@ -0,0 +1,13 @@
+// unit-test: DataflowConstProp
+
+// Not trackable, because variants could be aliased.
+enum E {
+ V1(i32),
+ V2(i32)
+}
+
+// EMIT_MIR enum.main.DataflowConstProp.diff
+fn main() {
+ let e = E::V1(0);
+ let x = match e { E::V1(x) => x, E::V2(x) => x };
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/if.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/if.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..26808c70fbf2c
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/if.main.DataflowConstProp.diff
@@ -0,0 +1,112 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/if.rs:+0:11: +0:11
+ let _1: i32; // in scope 0 at $DIR/if.rs:+1:9: +1:10
+ let mut _3: bool; // in scope 0 at $DIR/if.rs:+2:16: +2:22
+ let mut _4: i32; // in scope 0 at $DIR/if.rs:+2:16: +2:17
+ let mut _6: i32; // in scope 0 at $DIR/if.rs:+3:13: +3:14
+ let mut _8: bool; // in scope 0 at $DIR/if.rs:+5:16: +5:22
+ let mut _9: i32; // in scope 0 at $DIR/if.rs:+5:16: +5:17
+ let mut _10: i32; // in scope 0 at $DIR/if.rs:+5:36: +5:37
+ let mut _12: i32; // in scope 0 at $DIR/if.rs:+6:13: +6:14
+ scope 1 {
+ debug a => _1; // in scope 1 at $DIR/if.rs:+1:9: +1:10
+ let _2: i32; // in scope 1 at $DIR/if.rs:+2:9: +2:10
+ scope 2 {
+ debug b => _2; // in scope 2 at $DIR/if.rs:+2:9: +2:10
+ let _5: i32; // in scope 2 at $DIR/if.rs:+3:9: +3:10
+ scope 3 {
+ debug c => _5; // in scope 3 at $DIR/if.rs:+3:9: +3:10
+ let _7: i32; // in scope 3 at $DIR/if.rs:+5:9: +5:10
+ scope 4 {
+ debug d => _7; // in scope 4 at $DIR/if.rs:+5:9: +5:10
+ let _11: i32; // in scope 4 at $DIR/if.rs:+6:9: +6:10
+ scope 5 {
+ debug e => _11; // in scope 5 at $DIR/if.rs:+6:9: +6:10
+ }
+ }
+ }
+ }
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/if.rs:+1:9: +1:10
+ _1 = const 1_i32; // scope 0 at $DIR/if.rs:+1:13: +1:14
+ StorageLive(_2); // scope 1 at $DIR/if.rs:+2:9: +2:10
+ StorageLive(_3); // scope 1 at $DIR/if.rs:+2:16: +2:22
+ StorageLive(_4); // scope 1 at $DIR/if.rs:+2:16: +2:17
+- _4 = _1; // scope 1 at $DIR/if.rs:+2:16: +2:17
+- _3 = Eq(move _4, const 1_i32); // scope 1 at $DIR/if.rs:+2:16: +2:22
++ _4 = const 1_i32; // scope 1 at $DIR/if.rs:+2:16: +2:17
++ _3 = const true; // scope 1 at $DIR/if.rs:+2:16: +2:22
+ StorageDead(_4); // scope 1 at $DIR/if.rs:+2:21: +2:22
+- switchInt(move _3) -> [false: bb2, otherwise: bb1]; // scope 1 at $DIR/if.rs:+2:16: +2:22
++ switchInt(const true) -> [false: bb2, otherwise: bb1]; // scope 1 at $DIR/if.rs:+2:16: +2:22
+ }
+
+ bb1: {
+ _2 = const 2_i32; // scope 1 at $DIR/if.rs:+2:25: +2:26
+ goto -> bb3; // scope 1 at $DIR/if.rs:+2:13: +2:39
+ }
+
+ bb2: {
+ _2 = const 3_i32; // scope 1 at $DIR/if.rs:+2:36: +2:37
+ goto -> bb3; // scope 1 at $DIR/if.rs:+2:13: +2:39
+ }
+
+ bb3: {
+ StorageDead(_3); // scope 1 at $DIR/if.rs:+2:38: +2:39
+ StorageLive(_5); // scope 2 at $DIR/if.rs:+3:9: +3:10
+ StorageLive(_6); // scope 2 at $DIR/if.rs:+3:13: +3:14
+- _6 = _2; // scope 2 at $DIR/if.rs:+3:13: +3:14
+- _5 = Add(move _6, const 1_i32); // scope 2 at $DIR/if.rs:+3:13: +3:18
++ _6 = const 2_i32; // scope 2 at $DIR/if.rs:+3:13: +3:14
++ _5 = const 3_i32; // scope 2 at $DIR/if.rs:+3:13: +3:18
+ StorageDead(_6); // scope 2 at $DIR/if.rs:+3:17: +3:18
+ StorageLive(_7); // scope 3 at $DIR/if.rs:+5:9: +5:10
+ StorageLive(_8); // scope 3 at $DIR/if.rs:+5:16: +5:22
+ StorageLive(_9); // scope 3 at $DIR/if.rs:+5:16: +5:17
+- _9 = _1; // scope 3 at $DIR/if.rs:+5:16: +5:17
+- _8 = Eq(move _9, const 1_i32); // scope 3 at $DIR/if.rs:+5:16: +5:22
++ _9 = const 1_i32; // scope 3 at $DIR/if.rs:+5:16: +5:17
++ _8 = const true; // scope 3 at $DIR/if.rs:+5:16: +5:22
+ StorageDead(_9); // scope 3 at $DIR/if.rs:+5:21: +5:22
+- switchInt(move _8) -> [false: bb5, otherwise: bb4]; // scope 3 at $DIR/if.rs:+5:16: +5:22
++ switchInt(const true) -> [false: bb5, otherwise: bb4]; // scope 3 at $DIR/if.rs:+5:16: +5:22
+ }
+
+ bb4: {
+- _7 = _1; // scope 3 at $DIR/if.rs:+5:25: +5:26
++ _7 = const 1_i32; // scope 3 at $DIR/if.rs:+5:25: +5:26
+ goto -> bb6; // scope 3 at $DIR/if.rs:+5:13: +5:43
+ }
+
+ bb5: {
+ StorageLive(_10); // scope 3 at $DIR/if.rs:+5:36: +5:37
+ _10 = _1; // scope 3 at $DIR/if.rs:+5:36: +5:37
+ _7 = Add(move _10, const 1_i32); // scope 3 at $DIR/if.rs:+5:36: +5:41
+ StorageDead(_10); // scope 3 at $DIR/if.rs:+5:40: +5:41
+ goto -> bb6; // scope 3 at $DIR/if.rs:+5:13: +5:43
+ }
+
+ bb6: {
+ StorageDead(_8); // scope 3 at $DIR/if.rs:+5:42: +5:43
+ StorageLive(_11); // scope 4 at $DIR/if.rs:+6:9: +6:10
+ StorageLive(_12); // scope 4 at $DIR/if.rs:+6:13: +6:14
+- _12 = _7; // scope 4 at $DIR/if.rs:+6:13: +6:14
+- _11 = Add(move _12, const 1_i32); // scope 4 at $DIR/if.rs:+6:13: +6:18
++ _12 = const 1_i32; // scope 4 at $DIR/if.rs:+6:13: +6:14
++ _11 = const 2_i32; // scope 4 at $DIR/if.rs:+6:13: +6:18
+ StorageDead(_12); // scope 4 at $DIR/if.rs:+6:17: +6:18
+ _0 = const (); // scope 0 at $DIR/if.rs:+0:11: +7:2
+ StorageDead(_11); // scope 4 at $DIR/if.rs:+7:1: +7:2
+ StorageDead(_7); // scope 3 at $DIR/if.rs:+7:1: +7:2
+ StorageDead(_5); // scope 2 at $DIR/if.rs:+7:1: +7:2
+ StorageDead(_2); // scope 1 at $DIR/if.rs:+7:1: +7:2
+ StorageDead(_1); // scope 0 at $DIR/if.rs:+7:1: +7:2
+ return; // scope 0 at $DIR/if.rs:+7:2: +7:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/if.rs b/src/test/mir-opt/dataflow-const-prop/if.rs
new file mode 100644
index 0000000000000..34fc35790c17f
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/if.rs
@@ -0,0 +1,11 @@
+// unit-test: DataflowConstProp
+
+// EMIT_MIR if.main.DataflowConstProp.diff
+fn main() {
+ let a = 1;
+ let b = if a == 1 { 2 } else { 3 };
+ let c = b + 1;
+
+ let d = if a == 1 { a } else { a + 1 };
+ let e = d + 1;
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/inherit_overflow.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/inherit_overflow.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..bf4557ed3d92c
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/inherit_overflow.main.DataflowConstProp.diff
@@ -0,0 +1,45 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/inherit_overflow.rs:+0:11: +0:11
+ let mut _1: u8; // in scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
+ let mut _2: u8; // in scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
+ let mut _3: u8; // in scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
+ scope 1 {
+ }
+ scope 2 (inlined <u8 as Add>::add) { // at $DIR/inherit_overflow.rs:7:13: 7:47
+ debug self => _2; // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ debug other => _3; // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ let mut _4: u8; // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ let mut _5: u8; // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ let mut _6: (u8, bool); // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
+ StorageLive(_2); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
+ _2 = const u8::MAX; // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
+ StorageLive(_3); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
+ _3 = const 1_u8; // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
+ StorageLive(_4); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ _4 = const u8::MAX; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ StorageLive(_5); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ _5 = const 1_u8; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ _6 = CheckedAdd(const u8::MAX, const 1_u8); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ assert(!move (_6.1: bool), "attempt to compute `{} + {}`, which would overflow", const u8::MAX, const 1_u8) -> bb1; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ }
+
+ bb1: {
+- _1 = move (_6.0: u8); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
++ _1 = const 0_u8; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ StorageDead(_5); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ StorageDead(_4); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
+ StorageDead(_3); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
+ StorageDead(_2); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
+ StorageDead(_1); // scope 0 at $DIR/inherit_overflow.rs:+3:47: +3:48
+ nop; // scope 0 at $DIR/inherit_overflow.rs:+0:11: +4:2
+ return; // scope 0 at $DIR/inherit_overflow.rs:+4:2: +4:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/inherit_overflow.rs b/src/test/mir-opt/dataflow-const-prop/inherit_overflow.rs
new file mode 100644
index 0000000000000..2f2d9d0102d12
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/inherit_overflow.rs
@@ -0,0 +1,8 @@
+// compile-flags: -Zunsound-mir-opts
+
+// EMIT_MIR inherit_overflow.main.DataflowConstProp.diff
+fn main() {
+ // After inlining, this will contain a `CheckedBinaryOp`. The overflow
+ // must be ignored by the constant propagation to avoid triggering a panic.
+ let _ = <u8 as std::ops::Add>::add(255, 1);
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/issue_81605.f.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/issue_81605.f.DataflowConstProp.diff
new file mode 100644
index 0000000000000..881d80f7c0326
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/issue_81605.f.DataflowConstProp.diff
@@ -0,0 +1,35 @@
+- // MIR for `f` before DataflowConstProp
++ // MIR for `f` after DataflowConstProp
+
+ fn f() -> usize {
+ let mut _0: usize; // return place in scope 0 at $DIR/issue_81605.rs:+0:11: +0:16
+ let mut _1: usize; // in scope 0 at $DIR/issue_81605.rs:+1:9: +1:33
+ let mut _2: bool; // in scope 0 at $DIR/issue_81605.rs:+1:12: +1:16
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/issue_81605.rs:+1:9: +1:33
+ StorageLive(_2); // scope 0 at $DIR/issue_81605.rs:+1:12: +1:16
+ _2 = const true; // scope 0 at $DIR/issue_81605.rs:+1:12: +1:16
+- switchInt(move _2) -> [false: bb2, otherwise: bb1]; // scope 0 at $DIR/issue_81605.rs:+1:12: +1:16
++ switchInt(const true) -> [false: bb2, otherwise: bb1]; // scope 0 at $DIR/issue_81605.rs:+1:12: +1:16
+ }
+
+ bb1: {
+ _1 = const 1_usize; // scope 0 at $DIR/issue_81605.rs:+1:19: +1:20
+ goto -> bb3; // scope 0 at $DIR/issue_81605.rs:+1:9: +1:33
+ }
+
+ bb2: {
+ _1 = const 2_usize; // scope 0 at $DIR/issue_81605.rs:+1:30: +1:31
+ goto -> bb3; // scope 0 at $DIR/issue_81605.rs:+1:9: +1:33
+ }
+
+ bb3: {
+ StorageDead(_2); // scope 0 at $DIR/issue_81605.rs:+1:32: +1:33
+- _0 = Add(const 1_usize, move _1); // scope 0 at $DIR/issue_81605.rs:+1:5: +1:33
++ _0 = const 2_usize; // scope 0 at $DIR/issue_81605.rs:+1:5: +1:33
+ StorageDead(_1); // scope 0 at $DIR/issue_81605.rs:+1:32: +1:33
+ return; // scope 0 at $DIR/issue_81605.rs:+2:2: +2:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/issue_81605.rs b/src/test/mir-opt/dataflow-const-prop/issue_81605.rs
new file mode 100644
index 0000000000000..d75e2a28bef6b
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/issue_81605.rs
@@ -0,0 +1,10 @@
+// unit-test: DataflowConstProp
+
+// EMIT_MIR issue_81605.f.DataflowConstProp.diff
+fn f() -> usize {
+ 1 + if true { 1 } else { 2 }
+}
+
+fn main() {
+ f();
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/ref_without_sb.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/ref_without_sb.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..158f187f15769
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/ref_without_sb.main.DataflowConstProp.diff
@@ -0,0 +1,55 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/ref_without_sb.rs:+0:11: +0:11
+ let mut _1: i32; // in scope 0 at $DIR/ref_without_sb.rs:+1:9: +1:14
+ let _2: (); // in scope 0 at $DIR/ref_without_sb.rs:+2:5: +2:15
+ let mut _3: &i32; // in scope 0 at $DIR/ref_without_sb.rs:+2:12: +2:14
+ let _4: &i32; // in scope 0 at $DIR/ref_without_sb.rs:+2:12: +2:14
+ let _5: (); // in scope 0 at $DIR/ref_without_sb.rs:+4:5: +4:20
+ scope 1 {
+ debug a => _1; // in scope 1 at $DIR/ref_without_sb.rs:+1:9: +1:14
+ let _6: i32; // in scope 1 at $DIR/ref_without_sb.rs:+6:9: +6:10
+ scope 2 {
+ debug b => _6; // in scope 2 at $DIR/ref_without_sb.rs:+6:9: +6:10
+ }
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/ref_without_sb.rs:+1:9: +1:14
+ _1 = const 0_i32; // scope 0 at $DIR/ref_without_sb.rs:+1:17: +1:18
+ StorageLive(_2); // scope 1 at $DIR/ref_without_sb.rs:+2:5: +2:15
+ StorageLive(_3); // scope 1 at $DIR/ref_without_sb.rs:+2:12: +2:14
+ StorageLive(_4); // scope 1 at $DIR/ref_without_sb.rs:+2:12: +2:14
+ _4 = &_1; // scope 1 at $DIR/ref_without_sb.rs:+2:12: +2:14
+ _3 = &(*_4); // scope 1 at $DIR/ref_without_sb.rs:+2:12: +2:14
+ _2 = escape::<i32>(move _3) -> bb1; // scope 1 at $DIR/ref_without_sb.rs:+2:5: +2:15
+ // mir::Constant
+ // + span: $DIR/ref_without_sb.rs:12:5: 12:11
+ // + literal: Const { ty: for<'a> fn(&'a i32) {escape::<i32>}, val: Value(<ZST>) }
+ }
+
+ bb1: {
+ StorageDead(_3); // scope 1 at $DIR/ref_without_sb.rs:+2:14: +2:15
+ StorageDead(_4); // scope 1 at $DIR/ref_without_sb.rs:+2:15: +2:16
+ StorageDead(_2); // scope 1 at $DIR/ref_without_sb.rs:+2:15: +2:16
+ _1 = const 1_i32; // scope 1 at $DIR/ref_without_sb.rs:+3:5: +3:10
+ StorageLive(_5); // scope 1 at $DIR/ref_without_sb.rs:+4:5: +4:20
+ _5 = some_function() -> bb2; // scope 1 at $DIR/ref_without_sb.rs:+4:5: +4:20
+ // mir::Constant
+ // + span: $DIR/ref_without_sb.rs:14:5: 14:18
+ // + literal: Const { ty: fn() {some_function}, val: Value(<ZST>) }
+ }
+
+ bb2: {
+ StorageDead(_5); // scope 1 at $DIR/ref_without_sb.rs:+4:20: +4:21
+ StorageLive(_6); // scope 1 at $DIR/ref_without_sb.rs:+6:9: +6:10
+ _6 = _1; // scope 1 at $DIR/ref_without_sb.rs:+6:13: +6:14
+ _0 = const (); // scope 0 at $DIR/ref_without_sb.rs:+0:11: +7:2
+ StorageDead(_6); // scope 1 at $DIR/ref_without_sb.rs:+7:1: +7:2
+ StorageDead(_1); // scope 0 at $DIR/ref_without_sb.rs:+7:1: +7:2
+ return; // scope 0 at $DIR/ref_without_sb.rs:+7:2: +7:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/ref_without_sb.rs b/src/test/mir-opt/dataflow-const-prop/ref_without_sb.rs
new file mode 100644
index 0000000000000..2fd480b0968af
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/ref_without_sb.rs
@@ -0,0 +1,17 @@
+// unit-test: DataflowConstProp
+
+#[inline(never)]
+fn escape<T>(x: &T) {}
+
+#[inline(never)]
+fn some_function() {}
+
+// EMIT_MIR ref_without_sb.main.DataflowConstProp.diff
+fn main() {
+ let mut a = 0;
+ escape(&a);
+ a = 1;
+ some_function();
+ // This should currently not be propagated.
+ let b = a;
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/repr_transparent.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/repr_transparent.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..f66b00a9a224b
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/repr_transparent.main.DataflowConstProp.diff
@@ -0,0 +1,44 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/repr_transparent.rs:+0:11: +0:11
+ let _1: I32; // in scope 0 at $DIR/repr_transparent.rs:+1:9: +1:10
+ let mut _3: i32; // in scope 0 at $DIR/repr_transparent.rs:+2:17: +2:26
+ let mut _4: i32; // in scope 0 at $DIR/repr_transparent.rs:+2:17: +2:20
+ let mut _5: i32; // in scope 0 at $DIR/repr_transparent.rs:+2:23: +2:26
+ scope 1 {
+ debug x => _1; // in scope 1 at $DIR/repr_transparent.rs:+1:9: +1:10
+ let _2: I32; // in scope 1 at $DIR/repr_transparent.rs:+2:9: +2:10
+ scope 2 {
+ debug y => _2; // in scope 2 at $DIR/repr_transparent.rs:+2:9: +2:10
+ }
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/repr_transparent.rs:+1:9: +1:10
+ Deinit(_1); // scope 0 at $DIR/repr_transparent.rs:+1:13: +1:19
+ (_1.0: i32) = const 0_i32; // scope 0 at $DIR/repr_transparent.rs:+1:13: +1:19
+ StorageLive(_2); // scope 1 at $DIR/repr_transparent.rs:+2:9: +2:10
+ StorageLive(_3); // scope 1 at $DIR/repr_transparent.rs:+2:17: +2:26
+ StorageLive(_4); // scope 1 at $DIR/repr_transparent.rs:+2:17: +2:20
+- _4 = (_1.0: i32); // scope 1 at $DIR/repr_transparent.rs:+2:17: +2:20
++ _4 = const 0_i32; // scope 1 at $DIR/repr_transparent.rs:+2:17: +2:20
+ StorageLive(_5); // scope 1 at $DIR/repr_transparent.rs:+2:23: +2:26
+- _5 = (_1.0: i32); // scope 1 at $DIR/repr_transparent.rs:+2:23: +2:26
+- _3 = Add(move _4, move _5); // scope 1 at $DIR/repr_transparent.rs:+2:17: +2:26
++ _5 = const 0_i32; // scope 1 at $DIR/repr_transparent.rs:+2:23: +2:26
++ _3 = const 0_i32; // scope 1 at $DIR/repr_transparent.rs:+2:17: +2:26
+ StorageDead(_5); // scope 1 at $DIR/repr_transparent.rs:+2:25: +2:26
+ StorageDead(_4); // scope 1 at $DIR/repr_transparent.rs:+2:25: +2:26
+ Deinit(_2); // scope 1 at $DIR/repr_transparent.rs:+2:13: +2:27
+- (_2.0: i32) = move _3; // scope 1 at $DIR/repr_transparent.rs:+2:13: +2:27
++ (_2.0: i32) = const 0_i32; // scope 1 at $DIR/repr_transparent.rs:+2:13: +2:27
+ StorageDead(_3); // scope 1 at $DIR/repr_transparent.rs:+2:26: +2:27
+ _0 = const (); // scope 0 at $DIR/repr_transparent.rs:+0:11: +3:2
+ StorageDead(_2); // scope 1 at $DIR/repr_transparent.rs:+3:1: +3:2
+ StorageDead(_1); // scope 0 at $DIR/repr_transparent.rs:+3:1: +3:2
+ return; // scope 0 at $DIR/repr_transparent.rs:+3:2: +3:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/repr_transparent.rs b/src/test/mir-opt/dataflow-const-prop/repr_transparent.rs
new file mode 100644
index 0000000000000..4ce0ca4dff46f
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/repr_transparent.rs
@@ -0,0 +1,12 @@
+// unit-test: DataflowConstProp
+
+// The struct has scalar ABI, but is not a scalar type.
+// Make sure that we handle this correctly.
+#[repr(transparent)]
+struct I32(i32);
+
+// EMIT_MIR repr_transparent.main.DataflowConstProp.diff
+fn main() {
+ let x = I32(0);
+ let y = I32(x.0 + x.0);
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/self_assign.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/self_assign.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..df08eff94cb27
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/self_assign.main.DataflowConstProp.diff
@@ -0,0 +1,46 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/self_assign.rs:+0:11: +0:11
+ let mut _1: i32; // in scope 0 at $DIR/self_assign.rs:+1:9: +1:14
+ let mut _2: i32; // in scope 0 at $DIR/self_assign.rs:+2:9: +2:10
+ let mut _3: i32; // in scope 0 at $DIR/self_assign.rs:+3:9: +3:10
+ let mut _5: &i32; // in scope 0 at $DIR/self_assign.rs:+6:9: +6:10
+ let mut _6: i32; // in scope 0 at $DIR/self_assign.rs:+7:9: +7:11
+ scope 1 {
+ debug a => _1; // in scope 1 at $DIR/self_assign.rs:+1:9: +1:14
+ let mut _4: &i32; // in scope 1 at $DIR/self_assign.rs:+5:9: +5:14
+ scope 2 {
+ debug b => _4; // in scope 2 at $DIR/self_assign.rs:+5:9: +5:14
+ }
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/self_assign.rs:+1:9: +1:14
+ _1 = const 0_i32; // scope 0 at $DIR/self_assign.rs:+1:17: +1:18
+ StorageLive(_2); // scope 1 at $DIR/self_assign.rs:+2:9: +2:10
+ _2 = _1; // scope 1 at $DIR/self_assign.rs:+2:9: +2:10
+ _1 = Add(move _2, const 1_i32); // scope 1 at $DIR/self_assign.rs:+2:5: +2:14
+ StorageDead(_2); // scope 1 at $DIR/self_assign.rs:+2:13: +2:14
+ StorageLive(_3); // scope 1 at $DIR/self_assign.rs:+3:9: +3:10
+ _3 = _1; // scope 1 at $DIR/self_assign.rs:+3:9: +3:10
+ _1 = move _3; // scope 1 at $DIR/self_assign.rs:+3:5: +3:10
+ StorageDead(_3); // scope 1 at $DIR/self_assign.rs:+3:9: +3:10
+ StorageLive(_4); // scope 1 at $DIR/self_assign.rs:+5:9: +5:14
+ _4 = &_1; // scope 1 at $DIR/self_assign.rs:+5:17: +5:19
+ StorageLive(_5); // scope 2 at $DIR/self_assign.rs:+6:9: +6:10
+ _5 = _4; // scope 2 at $DIR/self_assign.rs:+6:9: +6:10
+ _4 = move _5; // scope 2 at $DIR/self_assign.rs:+6:5: +6:10
+ StorageDead(_5); // scope 2 at $DIR/self_assign.rs:+6:9: +6:10
+ StorageLive(_6); // scope 2 at $DIR/self_assign.rs:+7:9: +7:11
+ _6 = (*_4); // scope 2 at $DIR/self_assign.rs:+7:9: +7:11
+ _1 = move _6; // scope 2 at $DIR/self_assign.rs:+7:5: +7:11
+ StorageDead(_6); // scope 2 at $DIR/self_assign.rs:+7:10: +7:11
+ _0 = const (); // scope 0 at $DIR/self_assign.rs:+0:11: +8:2
+ StorageDead(_4); // scope 1 at $DIR/self_assign.rs:+8:1: +8:2
+ StorageDead(_1); // scope 0 at $DIR/self_assign.rs:+8:1: +8:2
+ return; // scope 0 at $DIR/self_assign.rs:+8:2: +8:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/self_assign.rs b/src/test/mir-opt/dataflow-const-prop/self_assign.rs
new file mode 100644
index 0000000000000..8de2195f93ba4
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/self_assign.rs
@@ -0,0 +1,12 @@
+// unit-test: DataflowConstProp
+
+// EMIT_MIR self_assign.main.DataflowConstProp.diff
+fn main() {
+ let mut a = 0;
+ a = a + 1;
+ a = a;
+
+ let mut b = &a;
+ b = b;
+ a = *b;
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/self_assign_add.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/self_assign_add.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..c09e4061ededf
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/self_assign_add.main.DataflowConstProp.diff
@@ -0,0 +1,23 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/self_assign_add.rs:+0:11: +0:11
+ let mut _1: i32; // in scope 0 at $DIR/self_assign_add.rs:+1:9: +1:14
+ scope 1 {
+ debug a => _1; // in scope 1 at $DIR/self_assign_add.rs:+1:9: +1:14
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/self_assign_add.rs:+1:9: +1:14
+ _1 = const 0_i32; // scope 0 at $DIR/self_assign_add.rs:+1:17: +1:18
+- _1 = Add(_1, const 1_i32); // scope 1 at $DIR/self_assign_add.rs:+2:5: +2:11
+- _1 = Add(_1, const 1_i32); // scope 1 at $DIR/self_assign_add.rs:+3:5: +3:11
++ _1 = const 1_i32; // scope 1 at $DIR/self_assign_add.rs:+2:5: +2:11
++ _1 = const 2_i32; // scope 1 at $DIR/self_assign_add.rs:+3:5: +3:11
+ _0 = const (); // scope 0 at $DIR/self_assign_add.rs:+0:11: +4:2
+ StorageDead(_1); // scope 0 at $DIR/self_assign_add.rs:+4:1: +4:2
+ return; // scope 0 at $DIR/self_assign_add.rs:+4:2: +4:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/self_assign_add.rs b/src/test/mir-opt/dataflow-const-prop/self_assign_add.rs
new file mode 100644
index 0000000000000..e3282762459a0
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/self_assign_add.rs
@@ -0,0 +1,8 @@
+// unit-test: DataflowConstProp
+
+// EMIT_MIR self_assign_add.main.DataflowConstProp.diff
+fn main() {
+ let mut a = 0;
+ a += 1;
+ a += 1;
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..8126d4b8585e6
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.diff
@@ -0,0 +1,56 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/sibling_ptr.rs:+0:11: +0:11
+ let mut _1: (u8, u8); // in scope 0 at $DIR/sibling_ptr.rs:+1:9: +1:14
+ let _2: (); // in scope 0 at $DIR/sibling_ptr.rs:+2:5: +5:6
+ let mut _4: *mut u8; // in scope 0 at $DIR/sibling_ptr.rs:+4:10: +4:18
+ let mut _5: *mut u8; // in scope 0 at $DIR/sibling_ptr.rs:+4:10: +4:11
+ scope 1 {
+ debug x => _1; // in scope 1 at $DIR/sibling_ptr.rs:+1:9: +1:14
+ let _6: u8; // in scope 1 at $DIR/sibling_ptr.rs:+6:9: +6:11
+ scope 2 {
+ let _3: *mut u8; // in scope 2 at $DIR/sibling_ptr.rs:+3:13: +3:14
+ scope 3 {
+ debug p => _3; // in scope 3 at $DIR/sibling_ptr.rs:+3:13: +3:14
+ }
+ }
+ scope 4 {
+ debug x1 => _6; // in scope 4 at $DIR/sibling_ptr.rs:+6:9: +6:11
+ }
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/sibling_ptr.rs:+1:9: +1:14
+ Deinit(_1); // scope 0 at $DIR/sibling_ptr.rs:+1:27: +1:33
+ (_1.0: u8) = const 0_u8; // scope 0 at $DIR/sibling_ptr.rs:+1:27: +1:33
+ (_1.1: u8) = const 0_u8; // scope 0 at $DIR/sibling_ptr.rs:+1:27: +1:33
+ StorageLive(_2); // scope 1 at $DIR/sibling_ptr.rs:+2:5: +5:6
+ StorageLive(_3); // scope 2 at $DIR/sibling_ptr.rs:+3:13: +3:14
+ _3 = &raw mut (_1.0: u8); // scope 2 at $SRC_DIR/core/src/ptr/mod.rs:LL:COL
+ StorageLive(_4); // scope 3 at $DIR/sibling_ptr.rs:+4:10: +4:18
+ StorageLive(_5); // scope 3 at $DIR/sibling_ptr.rs:+4:10: +4:11
+ _5 = _3; // scope 3 at $DIR/sibling_ptr.rs:+4:10: +4:11
+ _4 = ptr::mut_ptr::<impl *mut u8>::add(move _5, const 1_usize) -> bb1; // scope 3 at $DIR/sibling_ptr.rs:+4:10: +4:18
+ // mir::Constant
+ // + span: $DIR/sibling_ptr.rs:8:12: 8:15
+ // + literal: Const { ty: unsafe fn(*mut u8, usize) -> *mut u8 {ptr::mut_ptr::<impl *mut u8>::add}, val: Value(<ZST>) }
+ }
+
+ bb1: {
+ StorageDead(_5); // scope 3 at $DIR/sibling_ptr.rs:+4:17: +4:18
+ (*_4) = const 1_u8; // scope 3 at $DIR/sibling_ptr.rs:+4:9: +4:22
+ StorageDead(_4); // scope 3 at $DIR/sibling_ptr.rs:+4:22: +4:23
+ _2 = const (); // scope 2 at $DIR/sibling_ptr.rs:+2:5: +5:6
+ StorageDead(_3); // scope 2 at $DIR/sibling_ptr.rs:+5:5: +5:6
+ StorageDead(_2); // scope 1 at $DIR/sibling_ptr.rs:+5:5: +5:6
+ StorageLive(_6); // scope 1 at $DIR/sibling_ptr.rs:+6:9: +6:11
+ _6 = (_1.1: u8); // scope 1 at $DIR/sibling_ptr.rs:+6:14: +6:17
+ _0 = const (); // scope 0 at $DIR/sibling_ptr.rs:+0:11: +7:2
+ StorageDead(_6); // scope 1 at $DIR/sibling_ptr.rs:+7:1: +7:2
+ StorageDead(_1); // scope 0 at $DIR/sibling_ptr.rs:+7:1: +7:2
+ return; // scope 0 at $DIR/sibling_ptr.rs:+7:2: +7:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/sibling_ptr.rs b/src/test/mir-opt/dataflow-const-prop/sibling_ptr.rs
new file mode 100644
index 0000000000000..87ef00d18295f
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/sibling_ptr.rs
@@ -0,0 +1,11 @@
+// unit-test: DataflowConstProp
+
+// EMIT_MIR sibling_ptr.main.DataflowConstProp.diff
+fn main() {
+ let mut x: (u8, u8) = (0, 0);
+ unsafe {
+ let p = std::ptr::addr_of_mut!(x.0);
+ *p.add(1) = 1;
+ }
+ let x1 = x.1; // should not be propagated
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/struct.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/struct.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..cfb2706c167cd
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/struct.main.DataflowConstProp.diff
@@ -0,0 +1,52 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/struct.rs:+0:11: +0:11
+ let mut _1: S; // in scope 0 at $DIR/struct.rs:+1:9: +1:14
+ let mut _3: i32; // in scope 0 at $DIR/struct.rs:+2:13: +2:16
+ let mut _5: i32; // in scope 0 at $DIR/struct.rs:+4:13: +4:14
+ let mut _6: i32; // in scope 0 at $DIR/struct.rs:+4:17: +4:20
+ scope 1 {
+ debug s => _1; // in scope 1 at $DIR/struct.rs:+1:9: +1:14
+ let _2: i32; // in scope 1 at $DIR/struct.rs:+2:9: +2:10
+ scope 2 {
+ debug a => _2; // in scope 2 at $DIR/struct.rs:+2:9: +2:10
+ let _4: i32; // in scope 2 at $DIR/struct.rs:+4:9: +4:10
+ scope 3 {
+ debug b => _4; // in scope 3 at $DIR/struct.rs:+4:9: +4:10
+ }
+ }
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/struct.rs:+1:9: +1:14
+ Deinit(_1); // scope 0 at $DIR/struct.rs:+1:17: +1:21
+ (_1.0: i32) = const 1_i32; // scope 0 at $DIR/struct.rs:+1:17: +1:21
+ StorageLive(_2); // scope 1 at $DIR/struct.rs:+2:9: +2:10
+ StorageLive(_3); // scope 1 at $DIR/struct.rs:+2:13: +2:16
+- _3 = (_1.0: i32); // scope 1 at $DIR/struct.rs:+2:13: +2:16
+- _2 = Add(move _3, const 2_i32); // scope 1 at $DIR/struct.rs:+2:13: +2:20
++ _3 = const 1_i32; // scope 1 at $DIR/struct.rs:+2:13: +2:16
++ _2 = const 3_i32; // scope 1 at $DIR/struct.rs:+2:13: +2:20
+ StorageDead(_3); // scope 1 at $DIR/struct.rs:+2:19: +2:20
+ (_1.0: i32) = const 3_i32; // scope 2 at $DIR/struct.rs:+3:5: +3:12
+ StorageLive(_4); // scope 2 at $DIR/struct.rs:+4:9: +4:10
+ StorageLive(_5); // scope 2 at $DIR/struct.rs:+4:13: +4:14
+- _5 = _2; // scope 2 at $DIR/struct.rs:+4:13: +4:14
++ _5 = const 3_i32; // scope 2 at $DIR/struct.rs:+4:13: +4:14
+ StorageLive(_6); // scope 2 at $DIR/struct.rs:+4:17: +4:20
+- _6 = (_1.0: i32); // scope 2 at $DIR/struct.rs:+4:17: +4:20
+- _4 = Add(move _5, move _6); // scope 2 at $DIR/struct.rs:+4:13: +4:20
++ _6 = const 3_i32; // scope 2 at $DIR/struct.rs:+4:17: +4:20
++ _4 = const 6_i32; // scope 2 at $DIR/struct.rs:+4:13: +4:20
+ StorageDead(_6); // scope 2 at $DIR/struct.rs:+4:19: +4:20
+ StorageDead(_5); // scope 2 at $DIR/struct.rs:+4:19: +4:20
+ _0 = const (); // scope 0 at $DIR/struct.rs:+0:11: +5:2
+ StorageDead(_4); // scope 2 at $DIR/struct.rs:+5:1: +5:2
+ StorageDead(_2); // scope 1 at $DIR/struct.rs:+5:1: +5:2
+ StorageDead(_1); // scope 0 at $DIR/struct.rs:+5:1: +5:2
+ return; // scope 0 at $DIR/struct.rs:+5:2: +5:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/struct.rs b/src/test/mir-opt/dataflow-const-prop/struct.rs
new file mode 100644
index 0000000000000..841b279e03eee
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/struct.rs
@@ -0,0 +1,11 @@
+// unit-test: DataflowConstProp
+
+struct S(i32);
+
+// EMIT_MIR struct.main.DataflowConstProp.diff
+fn main() {
+ let mut s = S(1);
+ let a = s.0 + 2;
+ s.0 = 3;
+ let b = a + s.0;
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/terminator.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/terminator.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..8018400e798a7
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/terminator.main.DataflowConstProp.diff
@@ -0,0 +1,40 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/terminator.rs:+0:11: +0:11
+ let _1: i32; // in scope 0 at $DIR/terminator.rs:+1:9: +1:10
+ let _2: (); // in scope 0 at $DIR/terminator.rs:+3:5: +3:15
+ let mut _3: i32; // in scope 0 at $DIR/terminator.rs:+3:9: +3:14
+ let mut _4: i32; // in scope 0 at $DIR/terminator.rs:+3:9: +3:10
+ scope 1 {
+ debug a => _1; // in scope 1 at $DIR/terminator.rs:+1:9: +1:10
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/terminator.rs:+1:9: +1:10
+ _1 = const 1_i32; // scope 0 at $DIR/terminator.rs:+1:13: +1:14
+ StorageLive(_2); // scope 1 at $DIR/terminator.rs:+3:5: +3:15
+ StorageLive(_3); // scope 1 at $DIR/terminator.rs:+3:9: +3:14
+ StorageLive(_4); // scope 1 at $DIR/terminator.rs:+3:9: +3:10
+- _4 = _1; // scope 1 at $DIR/terminator.rs:+3:9: +3:10
+- _3 = Add(move _4, const 1_i32); // scope 1 at $DIR/terminator.rs:+3:9: +3:14
++ _4 = const 1_i32; // scope 1 at $DIR/terminator.rs:+3:9: +3:10
++ _3 = const 2_i32; // scope 1 at $DIR/terminator.rs:+3:9: +3:14
+ StorageDead(_4); // scope 1 at $DIR/terminator.rs:+3:13: +3:14
+- _2 = foo(move _3) -> bb1; // scope 1 at $DIR/terminator.rs:+3:5: +3:15
++ _2 = foo(const 2_i32) -> bb1; // scope 1 at $DIR/terminator.rs:+3:5: +3:15
+ // mir::Constant
+ // + span: $DIR/terminator.rs:9:5: 9:8
+ // + literal: Const { ty: fn(i32) {foo}, val: Value(<ZST>) }
+ }
+
+ bb1: {
+ StorageDead(_3); // scope 1 at $DIR/terminator.rs:+3:14: +3:15
+ StorageDead(_2); // scope 1 at $DIR/terminator.rs:+3:15: +3:16
+ _0 = const (); // scope 0 at $DIR/terminator.rs:+0:11: +4:2
+ StorageDead(_1); // scope 0 at $DIR/terminator.rs:+4:1: +4:2
+ return; // scope 0 at $DIR/terminator.rs:+4:2: +4:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/terminator.rs b/src/test/mir-opt/dataflow-const-prop/terminator.rs
new file mode 100644
index 0000000000000..d151f666a2dc2
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/terminator.rs
@@ -0,0 +1,10 @@
+// unit-test: DataflowConstProp
+
+fn foo(n: i32) {}
+
+// EMIT_MIR terminator.main.DataflowConstProp.diff
+fn main() {
+ let a = 1;
+ // Checks that we propagate into terminators.
+ foo(a + 1);
+}
diff --git a/src/test/mir-opt/dataflow-const-prop/tuple.main.DataflowConstProp.diff b/src/test/mir-opt/dataflow-const-prop/tuple.main.DataflowConstProp.diff
new file mode 100644
index 0000000000000..e028def00a116
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/tuple.main.DataflowConstProp.diff
@@ -0,0 +1,75 @@
+- // MIR for `main` before DataflowConstProp
++ // MIR for `main` after DataflowConstProp
+
+ fn main() -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/tuple.rs:+0:11: +0:11
+ let mut _1: (i32, i32); // in scope 0 at $DIR/tuple.rs:+1:9: +1:14
+ let mut _3: i32; // in scope 0 at $DIR/tuple.rs:+2:13: +2:22
+ let mut _4: i32; // in scope 0 at $DIR/tuple.rs:+2:13: +2:16
+ let mut _5: i32; // in scope 0 at $DIR/tuple.rs:+2:19: +2:22
+ let mut _7: i32; // in scope 0 at $DIR/tuple.rs:+4:13: +4:22
+ let mut _8: i32; // in scope 0 at $DIR/tuple.rs:+4:13: +4:16
+ let mut _9: i32; // in scope 0 at $DIR/tuple.rs:+4:19: +4:22
+ let mut _10: i32; // in scope 0 at $DIR/tuple.rs:+4:25: +4:26
+ scope 1 {
+ debug a => _1; // in scope 1 at $DIR/tuple.rs:+1:9: +1:14
+ let _2: i32; // in scope 1 at $DIR/tuple.rs:+2:9: +2:10
+ scope 2 {
+ debug b => _2; // in scope 2 at $DIR/tuple.rs:+2:9: +2:10
+ let _6: i32; // in scope 2 at $DIR/tuple.rs:+4:9: +4:10
+ scope 3 {
+ debug c => _6; // in scope 3 at $DIR/tuple.rs:+4:9: +4:10
+ }
+ }
+ }
+
+ bb0: {
+ StorageLive(_1); // scope 0 at $DIR/tuple.rs:+1:9: +1:14
+ Deinit(_1); // scope 0 at $DIR/tuple.rs:+1:17: +1:23
+ (_1.0: i32) = const 1_i32; // scope 0 at $DIR/tuple.rs:+1:17: +1:23
+ (_1.1: i32) = const 2_i32; // scope 0 at $DIR/tuple.rs:+1:17: +1:23
+ StorageLive(_2); // scope 1 at $DIR/tuple.rs:+2:9: +2:10
+ StorageLive(_3); // scope 1 at $DIR/tuple.rs:+2:13: +2:22
+ StorageLive(_4); // scope 1 at $DIR/tuple.rs:+2:13: +2:16
+- _4 = (_1.0: i32); // scope 1 at $DIR/tuple.rs:+2:13: +2:16
++ _4 = const 1_i32; // scope 1 at $DIR/tuple.rs:+2:13: +2:16
+ StorageLive(_5); // scope 1 at $DIR/tuple.rs:+2:19: +2:22
+- _5 = (_1.1: i32); // scope 1 at $DIR/tuple.rs:+2:19: +2:22
+- _3 = Add(move _4, move _5); // scope 1 at $DIR/tuple.rs:+2:13: +2:22
++ _5 = const 2_i32; // scope 1 at $DIR/tuple.rs:+2:19: +2:22
++ _3 = const 3_i32; // scope 1 at $DIR/tuple.rs:+2:13: +2:22
+ StorageDead(_5); // scope 1 at $DIR/tuple.rs:+2:21: +2:22
+ StorageDead(_4); // scope 1 at $DIR/tuple.rs:+2:21: +2:22
+- _2 = Add(move _3, const 3_i32); // scope 1 at $DIR/tuple.rs:+2:13: +2:26
++ _2 = const 6_i32; // scope 1 at $DIR/tuple.rs:+2:13: +2:26
+ StorageDead(_3); // scope 1 at $DIR/tuple.rs:+2:25: +2:26
+ Deinit(_1); // scope 2 at $DIR/tuple.rs:+3:5: +3:15
+ (_1.0: i32) = const 2_i32; // scope 2 at $DIR/tuple.rs:+3:5: +3:15
+ (_1.1: i32) = const 3_i32; // scope 2 at $DIR/tuple.rs:+3:5: +3:15
+ StorageLive(_6); // scope 2 at $DIR/tuple.rs:+4:9: +4:10
+ StorageLive(_7); // scope 2 at $DIR/tuple.rs:+4:13: +4:22
+ StorageLive(_8); // scope 2 at $DIR/tuple.rs:+4:13: +4:16
+- _8 = (_1.0: i32); // scope 2 at $DIR/tuple.rs:+4:13: +4:16
++ _8 = const 2_i32; // scope 2 at $DIR/tuple.rs:+4:13: +4:16
+ StorageLive(_9); // scope 2 at $DIR/tuple.rs:+4:19: +4:22
+- _9 = (_1.1: i32); // scope 2 at $DIR/tuple.rs:+4:19: +4:22
+- _7 = Add(move _8, move _9); // scope 2 at $DIR/tuple.rs:+4:13: +4:22
++ _9 = const 3_i32; // scope 2 at $DIR/tuple.rs:+4:19: +4:22
++ _7 = const 5_i32; // scope 2 at $DIR/tuple.rs:+4:13: +4:22
+ StorageDead(_9); // scope 2 at $DIR/tuple.rs:+4:21: +4:22
+ StorageDead(_8); // scope 2 at $DIR/tuple.rs:+4:21: +4:22
+ StorageLive(_10); // scope 2 at $DIR/tuple.rs:+4:25: +4:26
+- _10 = _2; // scope 2 at $DIR/tuple.rs:+4:25: +4:26
+- _6 = Add(move _7, move _10); // scope 2 at $DIR/tuple.rs:+4:13: +4:26
++ _10 = const 6_i32; // scope 2 at $DIR/tuple.rs:+4:25: +4:26
++ _6 = const 11_i32; // scope 2 at $DIR/tuple.rs:+4:13: +4:26
+ StorageDead(_10); // scope 2 at $DIR/tuple.rs:+4:25: +4:26
+ StorageDead(_7); // scope 2 at $DIR/tuple.rs:+4:25: +4:26
+ _0 = const (); // scope 0 at $DIR/tuple.rs:+0:11: +5:2
+ StorageDead(_6); // scope 2 at $DIR/tuple.rs:+5:1: +5:2
+ StorageDead(_2); // scope 1 at $DIR/tuple.rs:+5:1: +5:2
+ StorageDead(_1); // scope 0 at $DIR/tuple.rs:+5:1: +5:2
+ return; // scope 0 at $DIR/tuple.rs:+5:2: +5:2
+ }
+ }
+
diff --git a/src/test/mir-opt/dataflow-const-prop/tuple.rs b/src/test/mir-opt/dataflow-const-prop/tuple.rs
new file mode 100644
index 0000000000000..92c70eab0ff6f
--- /dev/null
+++ b/src/test/mir-opt/dataflow-const-prop/tuple.rs
@@ -0,0 +1,9 @@
+// unit-test: DataflowConstProp
+
+// EMIT_MIR tuple.main.DataflowConstProp.diff
+fn main() {
+ let mut a = (1, 2);
+ let b = a.0 + a.1 + 3;
+ a = (2, 3);
+ let c = a.0 + a.1 + b;
+}
diff --git a/src/test/mir-opt/issue_101973.inner.ConstProp.diff b/src/test/mir-opt/issue_101973.inner.ConstProp.diff
index c24abedae927f..bce40f277d23b 100644
--- a/src/test/mir-opt/issue_101973.inner.ConstProp.diff
+++ b/src/test/mir-opt/issue_101973.inner.ConstProp.diff
@@ -37,7 +37,6 @@
StorageLive(_4); // scope 0 at $DIR/issue_101973.rs:+1:5: +1:17
StorageLive(_5); // scope 0 at $DIR/issue_101973.rs:+1:10: +1:16
_5 = _1; // scope 0 at $DIR/issue_101973.rs:+1:10: +1:16
- _4 = const 0_u32; // scope 1 at $DIR/issue_101973.rs:6:19: 6:23
StorageLive(_12); // scope 2 at $DIR/issue_101973.rs:7:12: 7:27
StorageLive(_13); // scope 2 at $DIR/issue_101973.rs:7:12: 7:20
StorageLive(_14); // scope 2 at $DIR/issue_101973.rs:7:13: 7:14
@@ -73,7 +72,7 @@
StorageDead(_14); // scope 2 at $DIR/issue_101973.rs:7:19: 7:20
_12 = BitAnd(move _13, const 255_u32); // scope 2 at $DIR/issue_101973.rs:7:12: 7:27
StorageDead(_13); // scope 2 at $DIR/issue_101973.rs:7:26: 7:27
- _4 = BitOr(_4, move _12); // scope 2 at $DIR/issue_101973.rs:7:5: 7:27
+ _4 = BitOr(const 0_u32, move _12); // scope 2 at $DIR/issue_101973.rs:7:5: 7:27
StorageDead(_12); // scope 2 at $DIR/issue_101973.rs:7:26: 7:27
StorageDead(_5); // scope 0 at $DIR/issue_101973.rs:+1:16: +1:17
StorageLive(_6); // scope 0 at $DIR/issue_101973.rs:+1:31: +1:57
diff --git a/src/test/mir-opt/lower_intrinsics_e2e.f_u64.PreCodegen.after.mir b/src/test/mir-opt/lower_intrinsics_e2e.f_u64.PreCodegen.after.mir
index 8e185323e1a84..f6d8bdd742289 100644
--- a/src/test/mir-opt/lower_intrinsics_e2e.f_u64.PreCodegen.after.mir
+++ b/src/test/mir-opt/lower_intrinsics_e2e.f_u64.PreCodegen.after.mir
@@ -6,25 +6,20 @@ fn f_u64() -> () {
scope 1 (inlined f_dispatch::<u64>) { // at $DIR/lower_intrinsics_e2e.rs:15:5: 15:21
debug t => _1; // in scope 1 at $DIR/lower_intrinsics_e2e.rs:19:22: 19:23
let _2: (); // in scope 1 at $DIR/lower_intrinsics_e2e.rs:23:9: 23:21
- let mut _3: u64; // in scope 1 at $DIR/lower_intrinsics_e2e.rs:23:19: 23:20
scope 2 (inlined std::mem::size_of::<u64>) { // at $DIR/lower_intrinsics_e2e.rs:20:8: 20:32
}
}
bb0: {
StorageLive(_1); // scope 0 at $DIR/lower_intrinsics_e2e.rs:+1:5: +1:21
- _1 = const 0_u64; // scope 0 at $DIR/lower_intrinsics_e2e.rs:+1:5: +1:21
StorageLive(_2); // scope 1 at $DIR/lower_intrinsics_e2e.rs:23:9: 23:21
- StorageLive(_3); // scope 1 at $DIR/lower_intrinsics_e2e.rs:23:19: 23:20
- _3 = move _1; // scope 1 at $DIR/lower_intrinsics_e2e.rs:23:19: 23:20
- _2 = f_non_zst::<u64>(move _3) -> bb1; // scope 1 at $DIR/lower_intrinsics_e2e.rs:23:9: 23:21
+ _2 = f_non_zst::<u64>(const 0_u64) -> bb1; // scope 1 at $DIR/lower_intrinsics_e2e.rs:23:9: 23:21
// mir::Constant
// + span: $DIR/lower_intrinsics_e2e.rs:23:9: 23:18
// + literal: Const { ty: fn(u64) {f_non_zst::<u64>}, val: Value(<ZST>) }
}
bb1: {
- StorageDead(_3); // scope 1 at $DIR/lower_intrinsics_e2e.rs:23:20: 23:21
StorageDead(_2); // scope 1 at $DIR/lower_intrinsics_e2e.rs:23:21: 23:22
StorageDead(_1); // scope 0 at $DIR/lower_intrinsics_e2e.rs:+1:5: +1:21
return; // scope 0 at $DIR/lower_intrinsics_e2e.rs:+2:2: +2:2
diff --git a/src/test/ui/consts/const-eval/issue-50814.rs b/src/test/ui/consts/const-eval/issue-50814.rs
index 5a587701f787a..9c6108292b527 100644
--- a/src/test/ui/consts/const-eval/issue-50814.rs
+++ b/src/test/ui/consts/const-eval/issue-50814.rs
@@ -9,16 +9,16 @@ impl Unsigned for U8 {
const MAX: u8 = 0xff;
}
-struct Sum<A,B>(A,B);
+struct Sum<A, B>(A, B);
-impl<A: Unsigned, B: Unsigned> Unsigned for Sum<A,B> {
+impl<A: Unsigned, B: Unsigned> Unsigned for Sum<A, B> {
const MAX: u8 = A::MAX + B::MAX;
//~^ ERROR evaluation of `<Sum<U8, U8> as Unsigned>::MAX` failed
}
fn foo<T>(_: T) -> &'static u8 {
- &Sum::<U8,U8>::MAX
- //~^ ERROR E0080
+ &Sum::<U8, U8>::MAX
+ //~^ ERROR evaluation of `foo::<i32>` failed [E0080]
}
fn main() {
diff --git a/src/test/ui/consts/const-eval/issue-50814.stderr b/src/test/ui/consts/const-eval/issue-50814.stderr
index 46dd2b89fa2ac..38e9dc36ee98f 100644
--- a/src/test/ui/consts/const-eval/issue-50814.stderr
+++ b/src/test/ui/consts/const-eval/issue-50814.stderr
@@ -7,8 +7,8 @@ LL | const MAX: u8 = A::MAX + B::MAX;
error[E0080]: evaluation of `foo::<i32>` failed
--> $DIR/issue-50814.rs:20:6
|
-LL | &Sum::<U8,U8>::MAX
- | ^^^^^^^^^^^^^^^^^ referenced constant has errors
+LL | &Sum::<U8, U8>::MAX
+ | ^^^^^^^^^^^^^^^^^^ referenced constant has errors
note: the above error was encountered while instantiating `fn foo::<i32>`
--> $DIR/issue-50814.rs:25:5