Auto merge of #125313 - matthiaskrgr:rollup-65etxv0, r=matthiaskrgr
Rollup of 5 pull requests

Successful merges:

 - #125034 (Weekly `cargo update`)
 - #125093 (Add `fn into_raw_with_allocator` to Rc/Arc/Weak.)
 - #125282 (Never type unsafe lint improvements)
 - #125301 (fix suggestion in E0373 for !Unpin coroutines)
 - #125302 (defrost `RUST_MIN_STACK=ice rustc hello.rs`)

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed May 20, 2024
2 parents f092f73 + ecbd110 commit e8ada6a
Showing 18 changed files with 445 additions and 150 deletions.
221 changes: 138 additions & 83 deletions Cargo.lock

Large diffs are not rendered by default.

4 changes: 4 additions & 0 deletions compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
@@ -3343,6 +3343,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
} else if string.starts_with("gen") {
// `gen` is 3 chars long
Some(3)
} else if string.starts_with("static") {
// `static` is 6 chars long
// This is used for `!Unpin` coroutines
Some(6)
} else {
None
};
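As a sketch of the case the new branch handles (hypothetical, nightly-only code — coroutine literals are unstable and the exact feature gates may differ): for a `static` (`!Unpin`) coroutine, the `move` suggested by E0373 must be spliced in after the 6-character `static` prefix, yielding `static move ||` rather than `move static ||`.

```rust
#![feature(coroutines)]

fn make() -> impl Sized {
    let x = String::from("captured");
    // E0373: the coroutine may outlive the current function, but it borrows `x`;
    // with this fix the suggestion points after `static`, producing `static move ||`.
    static || {
        yield x.len();
    }
}
```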
75 changes: 40 additions & 35 deletions compiler/rustc_hir_typeck/src/fallback.rs
@@ -364,41 +364,11 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
};

let mut fallback_to = |ty| {
let unsafe_infer_vars = unsafe_infer_vars.get_or_init(|| {
let unsafe_infer_vars = compute_unsafe_infer_vars(self.root_ctxt, self.body_id);
debug!(?unsafe_infer_vars);
unsafe_infer_vars
});

let affected_unsafe_infer_vars =
graph::depth_first_search_as_undirected(&coercion_graph, root_vid)
.filter_map(|x| unsafe_infer_vars.get(&x).copied())
.collect::<Vec<_>>();

for (hir_id, span, reason) in affected_unsafe_infer_vars {
self.tcx.emit_node_span_lint(
lint::builtin::NEVER_TYPE_FALLBACK_FLOWING_INTO_UNSAFE,
hir_id,
span,
match reason {
UnsafeUseReason::Call => {
errors::NeverTypeFallbackFlowingIntoUnsafe::Call
}
UnsafeUseReason::Method => {
errors::NeverTypeFallbackFlowingIntoUnsafe::Method
}
UnsafeUseReason::Path => {
errors::NeverTypeFallbackFlowingIntoUnsafe::Path
}
UnsafeUseReason::UnionField => {
errors::NeverTypeFallbackFlowingIntoUnsafe::UnionField
}
UnsafeUseReason::Deref => {
errors::NeverTypeFallbackFlowingIntoUnsafe::Deref
}
},
);
}
self.lint_never_type_fallback_flowing_into_unsafe_code(
&unsafe_infer_vars,
&coercion_graph,
root_vid,
);

diverging_fallback.insert(diverging_ty, ty);
};
@@ -464,6 +434,41 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
diverging_fallback
}

fn lint_never_type_fallback_flowing_into_unsafe_code(
&self,
unsafe_infer_vars: &OnceCell<UnordMap<ty::TyVid, (HirId, Span, UnsafeUseReason)>>,
coercion_graph: &VecGraph<ty::TyVid, true>,
root_vid: ty::TyVid,
) {
let unsafe_infer_vars = unsafe_infer_vars.get_or_init(|| {
let unsafe_infer_vars = compute_unsafe_infer_vars(self.root_ctxt, self.body_id);
debug!(?unsafe_infer_vars);
unsafe_infer_vars
});

let affected_unsafe_infer_vars =
graph::depth_first_search_as_undirected(&coercion_graph, root_vid)
.filter_map(|x| unsafe_infer_vars.get(&x).copied())
.collect::<Vec<_>>();

for (hir_id, span, reason) in affected_unsafe_infer_vars {
self.tcx.emit_node_span_lint(
lint::builtin::NEVER_TYPE_FALLBACK_FLOWING_INTO_UNSAFE,
hir_id,
span,
match reason {
UnsafeUseReason::Call => errors::NeverTypeFallbackFlowingIntoUnsafe::Call,
UnsafeUseReason::Method => errors::NeverTypeFallbackFlowingIntoUnsafe::Method,
UnsafeUseReason::Path => errors::NeverTypeFallbackFlowingIntoUnsafe::Path,
UnsafeUseReason::UnionField => {
errors::NeverTypeFallbackFlowingIntoUnsafe::UnionField
}
UnsafeUseReason::Deref => errors::NeverTypeFallbackFlowingIntoUnsafe::Deref,
},
);
}
}
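In outline, the extracted helper performs an undirected reachability walk from the diverging root variable and reports every reachable inference variable that is also used in unsafe code. A standalone sketch with simplified, hypothetical types (plain integers standing in for `TyVid`, strings for the `UnsafeUseReason`):

```rust
use std::collections::{HashMap, HashSet};

// `edges` is an adjacency list with every coercion edge stored in both
// directions, so a plain DFS walks the graph as if it were undirected.
fn affected_unsafe_vars(
    edges: &HashMap<u32, Vec<u32>>,
    unsafe_uses: &HashMap<u32, &'static str>, // vid -> reason ("call", "deref", ...)
    root: u32,
) -> Vec<(u32, &'static str)> {
    let mut seen = HashSet::new();
    let mut stack = vec![root];
    let mut affected = Vec::new();
    while let Some(vid) = stack.pop() {
        if !seen.insert(vid) {
            continue; // already visited
        }
        if let Some(&reason) = unsafe_uses.get(&vid) {
            affected.push((vid, reason)); // this variable would be linted
        }
        stack.extend(edges.get(&vid).into_iter().flatten().copied());
    }
    affected
}
```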

/// Returns a graph whose nodes are (unresolved) inference variables and where
/// an edge `?A -> ?B` indicates that the variable `?A` is coerced to `?B`.
fn create_coercion_graph(&self) -> VecGraph<ty::TyVid, true> {
1 change: 1 addition & 0 deletions compiler/rustc_interface/src/interface.rs
@@ -389,6 +389,7 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
let hash_kind = config.opts.unstable_opts.src_hash_algorithm(&target);

util::run_in_thread_pool_with_globals(
&early_dcx,
config.opts.edition,
config.opts.unstable_opts.threads,
SourceMapInputs { file_loader, path_mapping, hash_kind },
41 changes: 32 additions & 9 deletions compiler/rustc_interface/src/util.rs
@@ -51,20 +51,38 @@ pub fn add_configuration(cfg: &mut Cfg, sess: &mut Session, codegen_backend: &dy
pub static STACK_SIZE: OnceLock<usize> = OnceLock::new();
pub const DEFAULT_STACK_SIZE: usize = 8 * 1024 * 1024;

fn init_stack_size() -> usize {
fn init_stack_size(early_dcx: &EarlyDiagCtxt) -> usize {
// Obey the environment setting or default
*STACK_SIZE.get_or_init(|| {
env::var_os("RUST_MIN_STACK")
.map(|os_str| os_str.to_string_lossy().into_owned())
// ignore if it is set to nothing
.filter(|s| s.trim() != "")
.map(|s| s.trim().parse::<usize>().unwrap())
.as_ref()
.map(|os_str| os_str.to_string_lossy())
// if someone finds out `export RUST_MIN_STACK=640000` isn't enough stack
// they might try to "unset" it by running `RUST_MIN_STACK= rustc code.rs`
// this is wrong, but std would nonetheless "do what they mean", so let's do likewise
.filter(|s| !s.trim().is_empty())
// rustc is a batch program, so error early on inputs which are unlikely to be intended
// so no one thinks we parsed them setting `RUST_MIN_STACK="64 megabytes"`
// FIXME: we could accept `RUST_MIN_STACK=64MB`, perhaps?
.map(|s| {
let s = s.trim();
// FIXME(workingjubilee): add proper diagnostics when we factor out "pre-run" setup
#[allow(rustc::untranslatable_diagnostic, rustc::diagnostic_outside_of_impl)]
s.parse::<usize>().unwrap_or_else(|_| {
let mut err = early_dcx.early_struct_fatal(format!(
r#"`RUST_MIN_STACK` should be a number of bytes, but was "{s}""#,
));
err.note("you can also unset `RUST_MIN_STACK` to use the default stack size");
err.emit()
})
})
// otherwise pick a consistent default
.unwrap_or(DEFAULT_STACK_SIZE)
})
}
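Stripped of rustc's diagnostic machinery, the new `RUST_MIN_STACK` handling amounts to the following standalone sketch (a plain `Result` stands in for `early_struct_fatal`): empty or whitespace-only values are treated as unset, malformed values are a hard error rather than a panic, and anything else is parsed as a byte count.

```rust
use std::env;

const DEFAULT_STACK_SIZE: usize = 8 * 1024 * 1024;

fn stack_size_from_env() -> Result<usize, String> {
    match env::var_os("RUST_MIN_STACK") {
        None => Ok(DEFAULT_STACK_SIZE),
        Some(os_str) => {
            let s = os_str.to_string_lossy();
            let s = s.trim();
            if s.is_empty() {
                // `RUST_MIN_STACK=` is treated as unset, matching std's behaviour
                Ok(DEFAULT_STACK_SIZE)
            } else {
                // a malformed value becomes an early, user-facing error
                s.parse::<usize>().map_err(|_| {
                    format!(r#"`RUST_MIN_STACK` should be a number of bytes, but was "{s}""#)
                })
            }
        }
    }
}
```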

fn run_in_thread_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
thread_stack_size: usize,
edition: Edition,
sm_inputs: SourceMapInputs,
f: F,
@@ -75,7 +93,7 @@ fn run_in_thread_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
// the parallel compiler, in particular to ensure there is no accidental
// sharing of data between the main thread and the compilation thread
// (which might cause problems for the parallel compiler).
let builder = thread::Builder::new().name("rustc".to_string()).stack_size(init_stack_size());
let builder = thread::Builder::new().name("rustc".to_string()).stack_size(thread_stack_size);

// We build the session globals and run `f` on the spawned thread, because
// `SessionGlobals` does not impl `Send` in the non-parallel compiler.
@@ -100,16 +118,19 @@ fn run_in_thread_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(

#[cfg(not(parallel_compiler))]
pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
thread_builder_diag: &EarlyDiagCtxt,
edition: Edition,
_threads: usize,
sm_inputs: SourceMapInputs,
f: F,
) -> R {
run_in_thread_with_globals(edition, sm_inputs, f)
let thread_stack_size = init_stack_size(thread_builder_diag);
run_in_thread_with_globals(thread_stack_size, edition, sm_inputs, f)
}

#[cfg(parallel_compiler)]
pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
thread_builder_diag: &EarlyDiagCtxt,
edition: Edition,
threads: usize,
sm_inputs: SourceMapInputs,
@@ -121,10 +142,12 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send,
use rustc_query_system::query::{break_query_cycles, QueryContext};
use std::process;

let thread_stack_size = init_stack_size(thread_builder_diag);

let registry = sync::Registry::new(std::num::NonZero::new(threads).unwrap());

if !sync::is_dyn_thread_safe() {
return run_in_thread_with_globals(edition, sm_inputs, |current_gcx| {
return run_in_thread_with_globals(thread_stack_size, edition, sm_inputs, |current_gcx| {
// Register the thread for use with the `WorkerLocal` type.
registry.register();

@@ -167,7 +190,7 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send,
})
.unwrap();
})
.stack_size(init_stack_size());
.stack_size(thread_stack_size);

// We create the session globals on the main thread, then create the thread
// pool. Upon creation, each worker thread created gets a copy of the
10 changes: 7 additions & 3 deletions compiler/rustc_lint_defs/src/builtin.rs
@@ -4263,8 +4263,7 @@ declare_lint! {
///
/// // where absurd is a function with the following signature
/// // (it's sound, because `!` always marks unreachable code):
/// fn absurd<T>(_: !) -> T { ... }
// FIXME: use `core::convert::absurd` here instead, once it's merged
/// fn absurd<T>(never: !) -> T { ... }
/// ```
///
/// While it's convenient to be able to use non-diverging code in one of the branches (like
@@ -4321,7 +4320,12 @@ declare_lint! {
/// [`()`]: https://doc.rust-lang.org/core/primitive.unit.html
pub NEVER_TYPE_FALLBACK_FLOWING_INTO_UNSAFE,
Warn,
"never type fallback affecting unsafe function calls"
"never type fallback affecting unsafe function calls",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseSemanticsChange,
reference: "issue #123748 <https://github.com/rust-lang/rust/issues/123748>",
};
report_in_external_macro
}
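A minimal sketch of the pattern this lint reports (adapted, not taken verbatim from the lint's docs): never type fallback chooses the type of an unsafe call, so code that is sound while fallback is `()` would become undefined behaviour once fallback changes to `!`.

```rust
fn main() {
    let _x = if true {
        // the type of this call is picked by fallback: `()` today, `!` in a
        // future release -- and `zeroed::<!>()` would be instant UB
        unsafe { std::mem::zeroed() }
    } else {
        // `!` from `return` flows into the same inference variable
        return;
    };
}
```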

declare_lint! {
50 changes: 40 additions & 10 deletions library/alloc/src/rc.rs
@@ -1356,6 +1356,33 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
ptr
}

/// Consumes the `Rc`, returning the wrapped pointer and allocator.
///
/// To avoid a memory leak the pointer must be converted back to an `Rc` using
/// [`Rc::from_raw_in`].
///
/// # Examples
///
/// ```
/// #![feature(allocator_api)]
/// use std::rc::Rc;
/// use std::alloc::System;
///
/// let x = Rc::new_in("hello".to_owned(), System);
/// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
/// assert_eq!(unsafe { &*ptr }, "hello");
/// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
/// assert_eq!(&*x, "hello");
/// ```
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
let this = mem::ManuallyDrop::new(this);
let ptr = Self::as_ptr(&this);
// Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
let alloc = unsafe { ptr::read(&this.alloc) };
(ptr, alloc)
}

/// Provides a raw pointer to the data.
///
/// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
@@ -3024,39 +3051,42 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
result
}

/// Consumes the `Weak<T>` and turns it into a raw pointer.
/// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
///
/// This converts the weak pointer into a raw pointer, while still preserving the ownership of
/// one weak reference (the weak count is not modified by this operation). It can be turned
/// back into the `Weak<T>` with [`from_raw`].
/// back into the `Weak<T>` with [`from_raw_in`].
///
/// The same restrictions of accessing the target of the pointer as with
/// [`as_ptr`] apply.
///
/// # Examples
///
/// ```
/// #![feature(allocator_api)]
/// use std::rc::{Rc, Weak};
/// use std::alloc::System;
///
/// let strong = Rc::new("hello".to_owned());
/// let strong = Rc::new_in("hello".to_owned(), System);
/// let weak = Rc::downgrade(&strong);
/// let raw = weak.into_raw();
/// let (raw, alloc) = weak.into_raw_with_allocator();
///
/// assert_eq!(1, Rc::weak_count(&strong));
/// assert_eq!("hello", unsafe { &*raw });
///
/// drop(unsafe { Weak::from_raw(raw) });
/// drop(unsafe { Weak::from_raw_in(raw, alloc) });
/// assert_eq!(0, Rc::weak_count(&strong));
/// ```
///
/// [`from_raw`]: Weak::from_raw
/// [`from_raw_in`]: Weak::from_raw_in
/// [`as_ptr`]: Weak::as_ptr
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn into_raw_and_alloc(self) -> (*const T, A) {
let rc = mem::ManuallyDrop::new(self);
let result = rc.as_ptr();
let alloc = unsafe { ptr::read(&rc.alloc) };
pub fn into_raw_with_allocator(self) -> (*const T, A) {
let this = mem::ManuallyDrop::new(self);
let result = this.as_ptr();
// Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
let alloc = unsafe { ptr::read(&this.alloc) };
(result, alloc)
}

67 changes: 67 additions & 0 deletions library/alloc/src/sync.rs
@@ -1496,6 +1496,34 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
ptr
}

/// Consumes the `Arc`, returning the wrapped pointer and allocator.
///
/// To avoid a memory leak the pointer must be converted back to an `Arc` using
/// [`Arc::from_raw_in`].
///
/// # Examples
///
/// ```
/// #![feature(allocator_api)]
/// use std::sync::Arc;
/// use std::alloc::System;
///
/// let x = Arc::new_in("hello".to_owned(), System);
/// let (ptr, alloc) = Arc::into_raw_with_allocator(x);
/// assert_eq!(unsafe { &*ptr }, "hello");
/// let x = unsafe { Arc::from_raw_in(ptr, alloc) };
/// assert_eq!(&*x, "hello");
/// ```
#[must_use = "losing the pointer will leak memory"]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
let this = mem::ManuallyDrop::new(this);
let ptr = Self::as_ptr(&this);
// Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
let alloc = unsafe { ptr::read(&this.alloc) };
(ptr, alloc)
}

/// Provides a raw pointer to the data.
///
/// The counts are not affected in any way and the `Arc` is not consumed. The pointer is valid for
@@ -2740,6 +2768,45 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
result
}

/// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
///
/// This converts the weak pointer into a raw pointer, while still preserving the ownership of
/// one weak reference (the weak count is not modified by this operation). It can be turned
/// back into the `Weak<T>` with [`from_raw_in`].
///
/// The same restrictions of accessing the target of the pointer as with
/// [`as_ptr`] apply.
///
/// # Examples
///
/// ```
/// #![feature(allocator_api)]
/// use std::sync::{Arc, Weak};
/// use std::alloc::System;
///
/// let strong = Arc::new_in("hello".to_owned(), System);
/// let weak = Arc::downgrade(&strong);
/// let (raw, alloc) = weak.into_raw_with_allocator();
///
/// assert_eq!(1, Arc::weak_count(&strong));
/// assert_eq!("hello", unsafe { &*raw });
///
/// drop(unsafe { Weak::from_raw_in(raw, alloc) });
/// assert_eq!(0, Arc::weak_count(&strong));
/// ```
///
/// [`from_raw_in`]: Weak::from_raw_in
/// [`as_ptr`]: Weak::as_ptr
#[must_use = "losing the pointer will leak memory"]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn into_raw_with_allocator(self) -> (*const T, A) {
let this = mem::ManuallyDrop::new(self);
let result = this.as_ptr();
// Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
let alloc = unsafe { ptr::read(&this.alloc) };
(result, alloc)
}

/// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>` in the provided
/// allocator.
///
(The remaining 10 changed files are not shown.)