Skip to content

Rollup of 6 pull requests #143521

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 18 commits into from
Jul 6, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
1f01d9b
std: sys: net: uefi: tcp4: Implement read
Ayush1325 Jul 5, 2025
226b0fb
use `is_multiple_of` instead of manual modulo
folkertdev Jul 5, 2025
ed3711e
use `div_ceil` instead of manual logic
folkertdev Jul 5, 2025
6b71399
mbe: Introduce an enum for which part of a rule we're parsing
joshtriplett Jul 4, 2025
971feb6
tidy: use --bless for tidy spellcheck instead of spellcheck:fix
lolbinarycat Jul 5, 2025
c1ed2ac
tidy: add specific error message for trying to use `spellcheck:fix`.
lolbinarycat Jul 5, 2025
9c9c5b0
compiletest: print slightly more information on fs::write failure
RalfJung Jul 5, 2025
9f78173
bootstrap: add change_tracker entry for removal of spellcheck:fix
lolbinarycat Jul 5, 2025
a7382ea
mbe: Add a helper to parse a single `TokenTree`
joshtriplett Jul 4, 2025
0d5ab3e
mbe: Simplify a match to a let-else
joshtriplett Jul 4, 2025
63cfb3a
mbe: Defer checks for `compile_error!` until reporting an unused macro rule
joshtriplett Jul 4, 2025
5bbab89
distinguish the duplicate item of rpitit
bvanjoi Jul 5, 2025
71b73e5
Rollup merge of #143416 - joshtriplett:mbe-simplifications, r=nnethercote
matthiaskrgr Jul 6, 2025
19f8ba4
Rollup merge of #143470 - Ayush1325:uefi-tcp4-recv, r=joshtriplett
matthiaskrgr Jul 6, 2025
6fb00b1
Rollup merge of #143477 - folkertdev:use-is-multiple-of, r=joshtriplett
matthiaskrgr Jul 6, 2025
534c09a
Rollup merge of #143484 - bvanjoi:issue-140796, r=compiler-errors
matthiaskrgr Jul 6, 2025
e25654f
Rollup merge of #143493 - lolbinarycat:tidy-spellcheck-bless, r=Kobzol
matthiaskrgr Jul 6, 2025
097efc0
Rollup merge of #143504 - RalfJung:compiletest-err, r=jieyouxu
matthiaskrgr Jul 6, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions compiler/rustc_abi/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -527,8 +527,7 @@ impl Size {
/// not a multiple of 8.
pub fn from_bits(bits: impl TryInto<u64>) -> Size {
let bits = bits.try_into().ok().unwrap();
// Avoid potential overflow from `bits + 7`.
Size { raw: bits / 8 + ((bits % 8) + 7) / 8 }
Size { raw: bits.div_ceil(8) }
}

#[inline]
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_borrowck/src/polonius/legacy/location.rs
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,6 @@ impl PoloniusLocationTable {
impl LocationIndex {
fn is_start(self) -> bool {
// even indices are start points; odd indices are mid points
(self.index() % 2) == 0
self.index().is_multiple_of(2)
}
}
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_llvm/src/abi.rs
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ impl LlvmType for CastTarget {
"total size {:?} cannot be divided into units of zero size",
self.rest.total
);
if self.rest.total.bytes() % self.rest.unit.size.bytes() != 0 {
if !self.rest.total.bytes().is_multiple_of(self.rest.unit.size.bytes()) {
assert_eq!(self.rest.unit.kind, RegKind::Integer, "only int regs can be split");
}
self.rest.total.bytes().div_ceil(self.rest.unit.size.bytes())
Expand Down
4 changes: 2 additions & 2 deletions compiler/rustc_codegen_llvm/src/va_arg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -172,10 +172,10 @@ fn emit_aapcs_va_arg<'ll, 'tcx>(

let gr_type = target_ty.is_any_ptr() || target_ty.is_integral();
let (reg_off, reg_top, slot_size) = if gr_type {
let nreg = (layout.size.bytes() + 7) / 8;
let nreg = layout.size.bytes().div_ceil(8);
(gr_offs, gr_top, nreg * 8)
} else {
let nreg = (layout.size.bytes() + 15) / 16;
let nreg = layout.size.bytes().div_ceil(16);
(vr_offs, vr_top, nreg * 16)
};

Expand Down
4 changes: 2 additions & 2 deletions compiler/rustc_const_eval/src/interpret/memory.rs
Original file line number Diff line number Diff line change
Expand Up @@ -537,7 +537,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {

#[inline]
fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
if offset % align.bytes() == 0 {
if offset.is_multiple_of(align.bytes()) {
None
} else {
// The biggest power of two through which `offset` is divisible.
Expand Down Expand Up @@ -1554,7 +1554,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
// If the allocation is N-aligned, and the offset is not divisible by N,
// then `base + offset` has a non-zero remainder after division by `N`,
// which means `base + offset` cannot be null.
if offset.bytes() % info.align.bytes() != 0 {
if !offset.bytes().is_multiple_of(info.align.bytes()) {
return interp_ok(false);
}
// We don't know enough, this might be null.
Expand Down
4 changes: 4 additions & 0 deletions compiler/rustc_expand/src/base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -348,6 +348,10 @@ pub trait TTMacroExpander {
span: Span,
input: TokenStream,
) -> MacroExpanderResult<'cx>;

fn get_unused_rule(&self, _rule_i: usize) -> Option<(&Ident, Span)> {
None
}
}

pub type MacroExpanderResult<'cx> = ExpandResult<Box<dyn MacResult + 'cx>, ()>;
Expand Down
10 changes: 5 additions & 5 deletions compiler/rustc_expand/src/mbe/diagnostics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use rustc_span::source_map::SourceMap;
use rustc_span::{ErrorGuaranteed, Ident, Span};
use tracing::debug;

use super::macro_rules::{NoopTracker, parser_from_cx};
use super::macro_rules::{MacroRule, NoopTracker, parser_from_cx};
use crate::expand::{AstFragmentKind, parse_ast_fragment};
use crate::mbe::macro_parser::ParseResult::*;
use crate::mbe::macro_parser::{MatcherLoc, NamedParseResult, TtParser};
Expand All @@ -22,14 +22,14 @@ pub(super) fn failed_to_match_macro(
def_span: Span,
name: Ident,
arg: TokenStream,
lhses: &[Vec<MatcherLoc>],
rules: &[MacroRule],
) -> (Span, ErrorGuaranteed) {
debug!("failed to match macro");
// An error occurred, try the expansion again, tracking the expansion closely for better
// diagnostics.
let mut tracker = CollectTrackerAndEmitter::new(psess.dcx(), sp);

let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut tracker);
let try_success_result = try_match_macro(psess, name, &arg, rules, &mut tracker);

if try_success_result.is_ok() {
// Nonterminal parser recovery might turn failed matches into successful ones,
Expand Down Expand Up @@ -80,12 +80,12 @@ pub(super) fn failed_to_match_macro(

// Check whether there's a missing comma in this macro call, like `println!("{}" a);`
if let Some((arg, comma_span)) = arg.add_comma() {
for lhs in lhses {
for rule in rules {
let parser = parser_from_cx(psess, arg.clone(), Recovery::Allowed);
let mut tt_parser = TtParser::new(name);

if let Success(_) =
tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, &mut NoopTracker)
tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, &mut NoopTracker)
{
if comma_span.is_dummy() {
err.note("you might be missing a comma");
Expand Down
140 changes: 50 additions & 90 deletions compiler/rustc_expand/src/mbe/macro_rules.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ use crate::base::{
};
use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
use crate::mbe::quoted::{RulePart, parse_one_tt};
use crate::mbe::transcribe::transcribe;
use crate::mbe::{self, KleeneOp, macro_check};

Expand Down Expand Up @@ -97,13 +98,18 @@ impl<'a> ParserAnyMacro<'a> {
}
}

pub(super) struct MacroRule {
pub(super) lhs: Vec<MatcherLoc>,
lhs_span: Span,
rhs: mbe::TokenTree,
}

struct MacroRulesMacroExpander {
node_id: NodeId,
name: Ident,
span: Span,
transparency: Transparency,
lhses: Vec<Vec<MatcherLoc>>,
rhses: Vec<mbe::TokenTree>,
rules: Vec<MacroRule>,
}

impl TTMacroExpander for MacroRulesMacroExpander {
Expand All @@ -121,10 +127,15 @@ impl TTMacroExpander for MacroRulesMacroExpander {
self.name,
self.transparency,
input,
&self.lhses,
&self.rhses,
&self.rules,
))
}

fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, Span)> {
// If the rhs contains an invocation like `compile_error!`, don't report it as unused.
let rule = &self.rules[rule_i];
if has_compile_error_macro(&rule.rhs) { None } else { Some((&self.name, rule.lhs_span)) }
}
}

struct DummyExpander(ErrorGuaranteed);
Expand Down Expand Up @@ -183,9 +194,8 @@ impl<'matcher> Tracker<'matcher> for NoopTracker {
}
}

/// Expands the rules based macro defined by `lhses` and `rhses` for a given
/// input `arg`.
#[instrument(skip(cx, transparency, arg, lhses, rhses))]
/// Expands the rules based macro defined by `rules` for a given input `arg`.
#[instrument(skip(cx, transparency, arg, rules))]
fn expand_macro<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
Expand All @@ -194,8 +204,7 @@ fn expand_macro<'cx>(
name: Ident,
transparency: Transparency,
arg: TokenStream,
lhses: &[Vec<MatcherLoc>],
rhses: &[mbe::TokenTree],
rules: &[MacroRule],
) -> Box<dyn MacResult + 'cx> {
let psess = &cx.sess.psess;
// Macros defined in the current crate have a real node id,
Expand All @@ -208,15 +217,14 @@ fn expand_macro<'cx>(
}

// Track nothing for the best performance.
let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut NoopTracker);
let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

match try_success_result {
Ok((i, named_matches)) => {
let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] {
mbe::TokenTree::Delimited(span, _, delimited) => (&delimited, *span),
_ => cx.dcx().span_bug(sp, "malformed macro rhs"),
Ok((i, rule, named_matches)) => {
let mbe::TokenTree::Delimited(rhs_span, _, ref rhs) = rule.rhs else {
cx.dcx().span_bug(sp, "malformed macro rhs");
};
let arm_span = rhses[i].span();
let arm_span = rule.rhs.span();

// rhs has holes ( `$id` and `$(...)` that need filled)
let id = cx.current_expansion.id;
Expand Down Expand Up @@ -262,7 +270,7 @@ fn expand_macro<'cx>(
Err(CanRetry::Yes) => {
// Retry and emit a better error.
let (span, guar) =
diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, lhses);
diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, rules);
cx.trace_macros_diag();
DummyResult::any(span, guar)
}
Expand All @@ -278,14 +286,14 @@ pub(super) enum CanRetry {
/// Try expanding the macro. Returns the index of the successful arm and its named_matches if it was successful,
/// and nothing if it failed. On failure, it's the callers job to use `track` accordingly to record all errors
/// correctly.
#[instrument(level = "debug", skip(psess, arg, lhses, track), fields(tracking = %T::description()))]
#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
psess: &ParseSess,
name: Ident,
arg: &TokenStream,
lhses: &'matcher [Vec<MatcherLoc>],
rules: &'matcher [MacroRule],
track: &mut T,
) -> Result<(usize, NamedMatches), CanRetry> {
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
// We create a base parser that can be used for the "black box" parts.
// Every iteration needs a fresh copy of that parser. However, the parser
// is not mutated on many of the iterations, particularly when dealing with
Expand All @@ -308,7 +316,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
let parser = parser_from_cx(psess, arg.clone(), T::recovery());
// Try each arm's matchers.
let mut tt_parser = TtParser::new(name);
for (i, lhs) in lhses.iter().enumerate() {
for (i, rule) in rules.iter().enumerate() {
let _tracing_span = trace_span!("Matching arm", %i);

// Take a snapshot of the state of pre-expansion gating at this point.
Expand All @@ -317,7 +325,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
// are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track);
let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, track);

track.after_arm(&result);

Expand All @@ -328,7 +336,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
// Merge the gated spans from parsing the matcher with the preexisting ones.
psess.gated_spans.merge(gated_spans_snapshot);

return Ok((i, named_matches));
return Ok((i, rule, named_matches));
}
Failure(_) => {
trace!("Failed to match arm, trying the next one");
Expand Down Expand Up @@ -364,7 +372,7 @@ pub fn compile_declarative_macro(
span: Span,
node_id: NodeId,
edition: Edition,
) -> (SyntaxExtension, Vec<(usize, Span)>) {
) -> (SyntaxExtension, usize) {
let mk_syn_ext = |expander| {
SyntaxExtension::new(
sess,
Expand All @@ -377,7 +385,7 @@ pub fn compile_declarative_macro(
node_id != DUMMY_NODE_ID,
)
};
let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), Vec::new());
let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), 0);

let macro_rules = macro_def.macro_rules;
let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };
Expand All @@ -389,21 +397,11 @@ pub fn compile_declarative_macro(
let mut guar = None;
let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());

let mut lhses = Vec::new();
let mut rhses = Vec::new();
let mut rules = Vec::new();

while p.token != token::Eof {
let lhs_tt = p.parse_token_tree();
let lhs_tt = mbe::quoted::parse(
&TokenStream::new(vec![lhs_tt]),
true, // LHS
sess,
node_id,
features,
edition,
)
.pop()
.unwrap();
let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
// We don't handle errors here, the driver will abort after parsing/expansion. We can
// report every error in every macro this way.
check_emission(check_lhs_nt_follows(sess, node_id, &lhs_tt));
Expand All @@ -421,20 +419,18 @@ pub fn compile_declarative_macro(
return dummy_syn_ext(guar);
}
let rhs_tt = p.parse_token_tree();
let rhs_tt = mbe::quoted::parse(
&TokenStream::new(vec![rhs_tt]),
false, // RHS
sess,
node_id,
features,
edition,
)
.pop()
.unwrap();
let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
check_emission(check_rhs(sess, &rhs_tt));
check_emission(macro_check::check_meta_variables(&sess.psess, node_id, &lhs_tt, &rhs_tt));
lhses.push(lhs_tt);
rhses.push(rhs_tt);
let lhs_span = lhs_tt.span();
// Convert the lhs into `MatcherLoc` form, which is better for doing the
// actual matching.
let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
mbe::macro_parser::compute_locs(&delimited.tts)
} else {
return dummy_syn_ext(guar.unwrap());
};
rules.push(MacroRule { lhs, lhs_span, rhs: rhs_tt });
if p.token == token::Eof {
break;
}
Expand All @@ -443,7 +439,7 @@ pub fn compile_declarative_macro(
}
}

if lhses.is_empty() {
if rules.is_empty() {
let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
return dummy_syn_ext(guar);
}
Expand All @@ -457,48 +453,12 @@ pub fn compile_declarative_macro(
return dummy_syn_ext(guar);
}

// Compute the spans of the macro rules for unused rule linting.
// Also, we are only interested in non-foreign macros.
let rule_spans = if node_id != DUMMY_NODE_ID {
lhses
.iter()
.zip(rhses.iter())
.enumerate()
// If the rhs contains an invocation like compile_error!,
// don't consider the rule for the unused rule lint.
.filter(|(_idx, (_lhs, rhs))| !has_compile_error_macro(rhs))
// We only take the span of the lhs here,
// so that the spans of created warnings are smaller.
.map(|(idx, (lhs, _rhs))| (idx, lhs.span()))
.collect::<Vec<_>>()
} else {
Vec::new()
};
// Return the number of rules for unused rule linting, if this is a local macro.
let nrules = if node_id != DUMMY_NODE_ID { rules.len() } else { 0 };

// Convert the lhses into `MatcherLoc` form, which is better for doing the
// actual matching.
let lhses = lhses
.iter()
.map(|lhs| {
// Ignore the delimiters around the matcher.
match lhs {
mbe::TokenTree::Delimited(.., delimited) => {
mbe::macro_parser::compute_locs(&delimited.tts)
}
_ => sess.dcx().span_bug(span, "malformed macro lhs"),
}
})
.collect();

let expander = Arc::new(MacroRulesMacroExpander {
name: ident,
span,
node_id,
transparency,
lhses,
rhses,
});
(mk_syn_ext(expander), rule_spans)
let expander =
Arc::new(MacroRulesMacroExpander { name: ident, span, node_id, transparency, rules });
(mk_syn_ext(expander), nrules)
}

fn check_lhs_nt_follows(
Expand Down
Loading
Loading