Rollup of 7 pull requests #83790

Merged · 28 commits · Apr 2, 2021

Commits
7c89cc4
Add SharedResource abstraction and use it in write_shared
jyn514 Mar 25, 2021
f77ebd4
Add unstable option to only emit shared/crate-specific files
jyn514 Mar 25, 2021
0dbed61
Rework `std::sys::windows::alloc`
CDirkx Mar 12, 2021
b01bf0e
Apply suggestions from code review
CDirkx Mar 22, 2021
4cce9e3
Cache `GetProcessHeap`
CDirkx Mar 26, 2021
fa89c0f
add testcase for double-drop during Vec in-place collection
the8472 Mar 29, 2021
421f5d2
fix double-drop in in-place collect specialization
the8472 Mar 29, 2021
f2e52ff
2229: Produce a rustfix migration suggestion
arora-aman Mar 29, 2021
8f77356
give full path of constraint in suggest_constraining_type_param
Rustin170506 Mar 30, 2021
e78fac5
Handle the case of partially captured drop type
arora-aman Mar 31, 2021
d4f3f91
Enforce that Toolchain files are static and Crate files are dynamic
jyn514 Mar 31, 2021
1086d9b
Rename CrateSpecific -> InvocationSpecific
jyn514 Mar 31, 2021
413938d
Fix `--external-css` to be invocation-specific and note main.js shoul…
jyn514 Mar 31, 2021
18af989
Update lint message
arora-aman Apr 1, 2021
da86348
Update test cases
arora-aman Apr 1, 2021
a721957
Don't introduce a block if a block exists
arora-aman Apr 2, 2021
1b9620d
Make the diagnostic message more readable
arora-aman Apr 2, 2021
ca14abb
Fix stack overflow detection on FreeBSD 11.1+
asomers Mar 27, 2021
fad5388
Simplify coverage tests
richkadel Apr 1, 2021
c86e098
Introduce `get_process_heap` and fix atomic ordering.
CDirkx Apr 2, 2021
db1d003
Remove `debug_assert`
CDirkx Apr 2, 2021
48ebad5
Rollup merge of #83065 - CDirkx:win-alloc, r=dtolnay
Dylan-DPC Apr 2, 2021
31f5320
Rollup merge of #83478 - jyn514:fine-grained-files, r=Mark-Simulacrum
Dylan-DPC Apr 2, 2021
542f441
Rollup merge of #83629 - the8472:fix-inplace-panic-on-drop, r=m-ou-se
Dylan-DPC Apr 2, 2021
6cb74ad
Rollup merge of #83673 - hi-rustin:rustin-patch-suggestion, r=estebank
Dylan-DPC Apr 2, 2021
7009117
Rollup merge of #83755 - richkadel:cov-test-simplify, r=tmandry
Dylan-DPC Apr 2, 2021
eed73c6
Rollup merge of #83757 - sexxi-goose:migrations_out, r=nikomatsakis
Dylan-DPC Apr 2, 2021
cb7133f
Rollup merge of #83771 - asomers:stack_overflow_freebsd, r=dtolnay
Dylan-DPC Apr 2, 2021
@@ -28,6 +28,7 @@ use std::fmt;

use super::InferCtxtPrivExt;
use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
use rustc_middle::ty::print::with_no_trimmed_paths;

#[derive(Debug)]
pub enum GeneratorInteriorOrUpvar {
@@ -440,7 +441,8 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
{
// Missing generic type parameter bound.
let param_name = self_ty.to_string();
let constraint = trait_ref.print_only_trait_path().to_string();
let constraint =
with_no_trimmed_paths(|| trait_ref.print_only_trait_path().to_string());
if suggest_constraining_type_param(
self.tcx,
generics,
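
For context, a rough sketch of the kind of code the now fully-qualified suggestion produces (hypothetical module and trait names, not taken from the PR's tests). With trimmed paths the suggested bound could be rendered as `T: Draw`, which may not resolve at the use site; with `with_no_trimmed_paths` the bound is printed with its full path and applies as written:

```rust
mod shapes {
    pub trait Draw {
        fn draw(&self) -> String;
    }

    impl Draw for i32 {
        fn draw(&self) -> String {
            format!("int({})", self)
        }
    }
}

// When an unconstrained `T` is used where `shapes::Draw` is required,
// `suggest_constraining_type_param` proposes adding a bound; after this change
// that bound is spelled `T: shapes::Draw` rather than the trimmed `T: Draw`.
fn render<T: shapes::Draw>(item: T) -> String {
    item.draw()
}

fn main() {
    assert_eq!(render(3), "int(3)");
}
```
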
109 changes: 81 additions & 28 deletions compiler/rustc_typeck/src/check/upvar.rs
@@ -34,6 +34,7 @@ use super::FnCtxt;

use crate::expr_use_visitor as euv;
use rustc_data_structures::fx::FxIndexMap;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::def_id::LocalDefId;
@@ -91,7 +92,7 @@ impl<'a, 'tcx> Visitor<'tcx> for InferBorrowKindVisitor<'a, 'tcx> {
if let hir::ExprKind::Closure(cc, _, body_id, _, _) = expr.kind {
let body = self.fcx.tcx.hir().body(body_id);
self.visit_body(body);
self.fcx.analyze_closure(expr.hir_id, expr.span, body, cc);
self.fcx.analyze_closure(expr.hir_id, expr.span, body_id, body, cc);
}

intravisit::walk_expr(self, expr);
@@ -104,6 +105,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&self,
closure_hir_id: hir::HirId,
span: Span,
body_id: hir::BodyId,
body: &'tcx hir::Body<'tcx>,
capture_clause: hir::CaptureBy,
) {
@@ -167,7 +169,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

let closure_hir_id = self.tcx.hir().local_def_id_to_hir_id(local_def_id);
if should_do_migration_analysis(self.tcx, closure_hir_id) {
self.perform_2229_migration_anaysis(closure_def_id, capture_clause, span);
self.perform_2229_migration_anaysis(closure_def_id, body_id, capture_clause, span);
}

// We now fake capture information for all variables that are mentioned within the closure
@@ -465,6 +467,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn perform_2229_migration_anaysis(
&self,
closure_def_id: DefId,
body_id: hir::BodyId,
capture_clause: hir::CaptureBy,
span: Span,
) {
@@ -476,7 +479,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
);

if !need_migrations.is_empty() {
let migrations_text = migration_suggestion_for_2229(self.tcx, &need_migrations);
let (migration_string, migrated_variables_concat) =
migration_suggestion_for_2229(self.tcx, &need_migrations);

let local_def_id = closure_def_id.expect_local();
let closure_hir_id = self.tcx.hir().local_def_id_to_hir_id(local_def_id);
@@ -488,7 +492,35 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let mut diagnostics_builder = lint.build(
"drop order affected for closure because of `capture_disjoint_fields`",
);
diagnostics_builder.note(&migrations_text);
let closure_body_span = self.tcx.hir().span(body_id.hir_id);
let (sugg, app) =
match self.tcx.sess.source_map().span_to_snippet(closure_body_span) {
Ok(s) => {
let trimmed = s.trim_start();

// If the closure contains a block then replace the opening brace
// with "{ let _ = (..); "
let sugg = if let Some('{') = trimmed.chars().next() {
format!("{{ {}; {}", migration_string, &trimmed[1..])
} else {
format!("{{ {}; {} }}", migration_string, s)
};
(sugg, Applicability::MachineApplicable)
}
Err(_) => (migration_string.clone(), Applicability::HasPlaceholders),
};

let diagnostic_msg = format!(
"add a dummy let to cause {} to be fully captured",
migrated_variables_concat
);

diagnostics_builder.span_suggestion(
closure_body_span,
&diagnostic_msg,
sugg,
app,
);
diagnostics_builder.emit();
},
);
@@ -621,7 +653,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// `w[c]`.
/// Notation:
/// - Ty(place): Type of place
/// - `(a, b)`: Represents the function parameters `base_path_ty` and `captured_projs`
/// - `(a, b)`: Represents the function parameters `base_path_ty` and `captured_by_move_projs`
/// respectively.
/// ```
/// (Ty(w), [ &[p, x], &[c] ])
@@ -682,7 +714,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
closure_def_id: DefId,
closure_span: Span,
base_path_ty: Ty<'tcx>,
captured_projs: Vec<&[Projection<'tcx>]>,
captured_by_move_projs: Vec<&[Projection<'tcx>]>,
) -> bool {
let needs_drop = |ty: Ty<'tcx>| {
ty.needs_drop(self.tcx, self.tcx.param_env(closure_def_id.expect_local()))
@@ -707,33 +739,37 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
//
// eg. If `a.b` is captured and we are processing `a.b`, then we can't have the closure also
// capture `a.b.c`, because that violates min capture.
let is_completely_captured = captured_projs.iter().any(|projs| projs.is_empty());
let is_completely_captured = captured_by_move_projs.iter().any(|projs| projs.is_empty());

assert!(!is_completely_captured || (captured_projs.len() == 1));
assert!(!is_completely_captured || (captured_by_move_projs.len() == 1));

if is_completely_captured {
// The place is captured entirely, so it doesn't matter whether it needs a dtor; it will be dropped
// when the closure is dropped.
return false;
}

if captured_by_move_projs.is_empty() {
return needs_drop(base_path_ty);
}

if is_drop_defined_for_ty {
// If drop is implemented for this type then we need it to be fully captured,
// which we know it is not because of the previous check. Therefore we need to
// do migrate.
return true;
}
// and we know it is not completely captured because of the previous checks.

if captured_projs.is_empty() {
return needs_drop(base_path_ty);
// Note that this is a bug in the user code that will be reported by the
// borrow checker, since we can't move out of drop types.

// The bug exists in the user's code pre-migration, and we don't migrate here.
return false;
}

match base_path_ty.kind() {
// Observations:
// - `captured_projs` is not empty. Therefore we can call
// `captured_projs.first().unwrap()` safely.
// - All entries in `captured_projs` have at least one projection.
// Therefore we can call `captured_projs.first().unwrap().first().unwrap()` safely.
// - `captured_by_move_projs` is not empty. Therefore we can call
// `captured_by_move_projs.first().unwrap()` safely.
// - All entries in `captured_by_move_projs` have at least one projection.
// Therefore we can call `captured_by_move_projs.first().unwrap().first().unwrap()` safely.

// We don't capture derefs in case of move captures, which would have to be applied to
// access any further paths.
@@ -743,19 +779,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

ty::Adt(def, substs) => {
// Multi-variant enums are captured in their entirety,
// which would've been handled in the case of single empty slice in `captured_projs`.
// which would've been handled in the case of single empty slice in `captured_by_move_projs`.
assert_eq!(def.variants.len(), 1);

// Only Field projections can be applied to a non-box Adt.
assert!(
captured_projs.iter().all(|projs| matches!(
captured_by_move_projs.iter().all(|projs| matches!(
projs.first().unwrap().kind,
ProjectionKind::Field(..)
))
);
def.variants.get(VariantIdx::new(0)).unwrap().fields.iter().enumerate().any(
|(i, field)| {
let paths_using_field = captured_projs
let paths_using_field = captured_by_move_projs
.iter()
.filter_map(|projs| {
if let ProjectionKind::Field(field_idx, _) =
@@ -782,14 +818,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ty::Tuple(..) => {
// Only Field projections can be applied to a tuple.
assert!(
captured_projs.iter().all(|projs| matches!(
captured_by_move_projs.iter().all(|projs| matches!(
projs.first().unwrap().kind,
ProjectionKind::Field(..)
))
);

base_path_ty.tuple_fields().enumerate().any(|(i, element_ty)| {
let paths_using_field = captured_projs
let paths_using_field = captured_by_move_projs
.iter()
.filter_map(|projs| {
if let ProjectionKind::Field(field_idx, _) = projs.first().unwrap().kind
@@ -1515,12 +1551,29 @@ fn should_do_migration_analysis(tcx: TyCtxt<'_>, closure_id: hir::HirId) -> bool
!matches!(level, lint::Level::Allow)
}

fn migration_suggestion_for_2229(tcx: TyCtxt<'_>, need_migrations: &Vec<hir::HirId>) -> String {
let need_migrations_strings =
need_migrations.iter().map(|v| format!("{}", var_name(tcx, *v))).collect::<Vec<_>>();
let migrations_list_concat = need_migrations_strings.join(", ");
/// Return a two string tuple (s1, s2)
/// - s1: Line of code that is needed for the migration: eg: `let _ = (&x, ...)`.
/// - s2: Comma separated names of the variables being migrated.
fn migration_suggestion_for_2229(
tcx: TyCtxt<'_>,
need_migrations: &Vec<hir::HirId>,
) -> (String, String) {
let need_migrations_variables =
need_migrations.iter().map(|v| var_name(tcx, *v)).collect::<Vec<_>>();

let migration_ref_concat =
need_migrations_variables.iter().map(|v| format!("&{}", v)).collect::<Vec<_>>().join(", ");

let migration_string = if 1 == need_migrations.len() {
format!("let _ = {}", migration_ref_concat)
} else {
format!("let _ = ({})", migration_ref_concat)
};

let migrated_variables_concat =
need_migrations_variables.iter().map(|v| format!("`{}`", v)).collect::<Vec<_>>().join(", ");

format!("drop(&({}));", migrations_list_concat)
(migration_string, migrated_variables_concat)
}

/// Helper function to determine if we need to escalate CaptureKind from
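
To make the suggestion concrete, here is a minimal sketch of affected user code (hypothetical names, not taken from the PR's tests). The closure uses only single fields, so under `capture_disjoint_fields` the parent variables would no longer be captured whole and their drop timing would change; the machine-applicable suggestion inserts the dummy let built by `migration_suggestion_for_2229` as the first statement of the closure body:

```rust
struct Buffers {
    log: String,
    data: String,
}

fn main() {
    let a = Buffers { log: String::from("log"), data: String::new() };
    let b = Buffers { log: String::new(), data: String::from("data") };

    // Suggested rewrite: `let _ = (&a, &b);` forces `a` and `b` to be captured
    // in full, preserving the pre-RFC-2229 drop order.
    let c = move || {
        let _ = (&a, &b);
        println!("{} {}", a.log.len(), b.data.len());
    };
    c();
}
```
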
27 changes: 18 additions & 9 deletions library/alloc/src/vec/into_iter.rs
@@ -85,20 +85,29 @@ impl<T, A: Allocator> IntoIter<T, A> {
ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
}

pub(super) fn drop_remaining(&mut self) {
unsafe {
ptr::drop_in_place(self.as_mut_slice());
}
self.ptr = self.end;
}
/// Drops remaining elements and relinquishes the backing allocation.
///
/// This is roughly equivalent to the following, but more efficient
///
/// ```
/// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
/// (&mut into_iter).for_each(core::mem::drop);
/// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
/// ```
pub(super) fn forget_allocation_drop_remaining(&mut self) {
let remaining = self.as_raw_mut_slice();

/// Relinquishes the backing allocation, equivalent to
/// `ptr::write(&mut self, Vec::new().into_iter())`
pub(super) fn forget_allocation(&mut self) {
// overwrite the individual fields instead of creating a new
// struct and then overwriting &mut self.
// this creates less assembly
self.cap = 0;
self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
self.ptr = self.buf.as_ptr();
self.end = self.buf.as_ptr();

unsafe {
ptr::drop_in_place(remaining);
}
}
}

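
The doc-comment's "roughly equivalent" snippet, expanded into a standalone sketch (illustrative only; `forget_allocation_drop_remaining` itself is `pub(super)` and not callable from user code):

```rust
fn main() {
    let mut into_iter = vec![String::from("a"), String::from("b")].into_iter();

    // Equivalent behavior from the outside: drop the remaining elements, then
    // overwrite the iterator with an empty one so the original backing
    // allocation is never freed through it (it is deliberately leaked here).
    (&mut into_iter).for_each(std::mem::drop);
    unsafe { std::ptr::write(&mut into_iter, Vec::new().into_iter()) };

    assert_eq!(into_iter.len(), 0);
}
```
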
4 changes: 2 additions & 2 deletions library/alloc/src/vec/source_iter_marker.rs
@@ -69,9 +69,9 @@
}

// drop any remaining values at the tail of the source
src.drop_remaining();
// but prevent drop of the allocation itself once IntoIter goes out of scope
src.forget_allocation();
// if the drop panics then we also leak any elements collected into dst_buf
src.forget_allocation_drop_remaining();

let vec = unsafe { Vec::from_raw_parts(dst_buf, len, cap) };

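
Observable behavior after this change: `collect` works as before, but if a destructor of one of the remaining source elements panics inside `forget_allocation_drop_remaining`, the elements already moved into the destination are leaked instead of being dropped a second time (which is what the new `test_from_iter_specialization_panic_during_drop_leaks` test below checks). A minimal, non-panicking sketch of the specialized path:

```rust
fn main() {
    let src: Vec<String> = (0..4).map(|i| i.to_string()).collect();

    // Starting from `vec::IntoIter` with a compatible element layout allows
    // `collect` to reuse the source allocation in place; `take(2)` leaves two
    // `String`s in the source tail, which are dropped only after the
    // allocation has been handed over to the destination vector.
    let dst: Vec<String> = src.into_iter().take(2).collect();
    assert_eq!(dst, ["0", "1"]);
}
```
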
38 changes: 37 additions & 1 deletion library/alloc/tests/vec.rs
@@ -1027,7 +1027,7 @@ fn test_from_iter_specialization_head_tail_drop() {
}

#[test]
fn test_from_iter_specialization_panic_drop() {
fn test_from_iter_specialization_panic_during_iteration_drops() {
let drop_count: Vec<_> = (0..=2).map(|_| Rc::new(())).collect();
let src: Vec<_> = drop_count.iter().cloned().collect();
let iter = src.into_iter();
@@ -1050,6 +1050,42 @@ fn test_from_iter_specialization_panic_drop() {
);
}

#[test]
fn test_from_iter_specialization_panic_during_drop_leaks() {
static mut DROP_COUNTER: usize = 0;

#[derive(Debug)]
enum Droppable {
DroppedTwice(Box<i32>),
PanicOnDrop,
}

impl Drop for Droppable {
fn drop(&mut self) {
match self {
Droppable::DroppedTwice(_) => {
unsafe {
DROP_COUNTER += 1;
}
println!("Dropping!")
}
Droppable::PanicOnDrop => {
if !std::thread::panicking() {
panic!();
}
}
}
}
}

let _ = std::panic::catch_unwind(AssertUnwindSafe(|| {
let v = vec![Droppable::DroppedTwice(Box::new(123)), Droppable::PanicOnDrop];
let _ = v.into_iter().take(0).collect::<Vec<_>>();
}));

assert_eq!(unsafe { DROP_COUNTER }, 1);
}

#[test]
fn test_cow_from() {
let borrowed: &[_] = &["borrowed", "(slice)"];
23 changes: 16 additions & 7 deletions library/std/src/sys/unix/thread.rs
@@ -343,6 +343,20 @@ pub mod guard {
// it can eventually grow to. It cannot be used to determine
// the position of kernel's stack guard.
None
} else if cfg!(target_os = "freebsd") {
// FreeBSD's stack autogrows, and optionally includes a guard page
// at the bottom. If we try to remap the bottom of the stack
// ourselves, FreeBSD's guard page moves upwards. So we'll just use
// the builtin guard page.
let stackaddr = get_stack_start_aligned()?;
let guardaddr = stackaddr as usize;
// Technically the number of guard pages is tunable and controlled
// by the security.bsd.stack_guard_page sysctl, but there are
// few reasons to change it from the default. The default value has
// been 1 ever since FreeBSD 11.1 and 10.4.
const GUARD_PAGES: usize = 1;
let guard = guardaddr..guardaddr + GUARD_PAGES * page_size;
Some(guard)
} else {
// Reallocate the last page of the stack.
// This ensures SIGBUS will be raised on
@@ -371,9 +385,8 @@
}

let guardaddr = stackaddr as usize;
let offset = if cfg!(target_os = "freebsd") { 2 } else { 1 };

Some(guardaddr..guardaddr + offset * page_size)
Some(guardaddr..guardaddr + page_size)
}
}

@@ -417,11 +430,7 @@ pub mod guard {
assert_eq!(libc::pthread_attr_getstack(&attr, &mut stackaddr, &mut size), 0);

let stackaddr = stackaddr as usize;
ret = if cfg!(target_os = "freebsd") {
// FIXME does freebsd really fault *below* the guard addr?
let guardaddr = stackaddr - guardsize;
Some(guardaddr - PAGE_SIZE.load(Ordering::Relaxed)..guardaddr)
} else if cfg!(target_os = "netbsd") {
ret = if cfg!(any(target_os = "freebsd", target_os = "netbsd")) {
Some(stackaddr - guardsize..stackaddr)
} else if cfg!(all(target_os = "linux", target_env = "musl")) {
Some(stackaddr - guardsize..stackaddr)
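
A rough sketch of how the guard range computed above is consumed (assumed function name and illustrative addresses; the real check lives in the SIGSEGV handler in `library/std/src/sys/unix/stack_overflow.rs`):

```rust
use std::ops::Range;

// If the faulting address reported for SIGSEGV/SIGBUS lies inside the guard
// range, the runtime reports a stack overflow instead of a plain segmentation
// fault. On FreeBSD 11.1+ that range now covers the kernel-provided guard
// page at the bottom of the stack.
fn fault_is_stack_overflow(fault_addr: usize, guard: Option<&Range<usize>>) -> bool {
    guard.map_or(false, |g| g.contains(&fault_addr))
}

fn main() {
    let guard = 0x7fff_0000usize..0x7fff_1000usize; // illustrative addresses only
    assert!(fault_is_stack_overflow(0x7fff_0800, Some(&guard)));
    assert!(!fault_is_stack_overflow(0x1000, Some(&guard)));
}
```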