Rollup merge of #110694 - est31:builtin, r=petrochenkov
Implement builtin # syntax and use it for offset_of!(...)

Add `builtin #` syntax to the parser, as well as a generic infrastructure to support both item and expression position builtin syntaxes. The PR also uses this infrastructure for the implementation of the `offset_of!` macro, added by #106934.

cc `@petrochenkov` `@DrMeepster`
cc #110680 (`builtin #` tracking issue)
cc #106655 (`offset_of!` tracking issue)
commit dbd090c655
25 changed files with 333 additions and 136 deletions
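For orientation, the expression-position form that the parser changes below accept looks roughly like the sketch here. This is an illustration, not part of the diff: it assumes a nightly toolchain with the `builtin_syntax` gate enabled, and the `Pair` struct is made up. The user-facing entry point remains the (still unstable) `offset_of!` macro, which this PR reimplements on top of the builtin form.

```rust
// Sketch only: demonstrates the surface syntax added by this PR.
// Assumes a nightly compiler where the gate below is available.
#![feature(builtin_syntax)]

#[allow(dead_code)]
struct Pair {
    a: u8,
    b: u32,
}

fn main() {
    // Parsed by parse_expr_builtin/parse_expr_offset_of into ExprKind::OffsetOf.
    // Nested fields would be written with `.`, e.g. `builtin # offset_of(Outer, inner.field)`.
    let off: usize = builtin # offset_of(Pair, b);
    println!("offset of `b` in `Pair`: {off} bytes");
}
```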
@@ -603,6 +603,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) {
     gate_all!(yeet_expr, "`do yeet` expression is experimental");
     gate_all!(dyn_star, "`dyn*` trait objects are experimental");
     gate_all!(const_closures, "const closures are experimental");
+    gate_all!(builtin_syntax, "`builtin #` syntax is unstable");
 
     if !visitor.features.negative_bounds {
         for &span in spans.get(&sym::negative_bounds).iter().copied().flatten() {

@@ -556,8 +556,7 @@ impl<'a> State<'a> {
                 self.pclose();
             }
             ast::ExprKind::OffsetOf(container, fields) => {
-                // FIXME: This should have its own syntax, distinct from a macro invocation.
-                self.word("offset_of!");
+                self.word("builtin # offset_of");
                 self.popen();
                 self.rbox(0, Inconsistent);
                 self.print_type(container);

@@ -150,10 +150,6 @@ builtin_macros_format_pos_mismatch = {$n} positional {$n ->
    *[more] arguments
 } in format string, but {$desc}
 
-builtin_macros_offset_of_expected_field = expected field
-
-builtin_macros_offset_of_expected_two_args = expected 2 arguments
-
 builtin_macros_test_case_non_item = `#[test_case]` attribute is only allowed on items
 
 builtin_macros_test_bad_fn = {$kind} functions cannot be used for tests

@@ -44,7 +44,6 @@ mod format;
 mod format_foreign;
 mod global_allocator;
 mod log_syntax;
-mod offset_of;
 mod source_util;
 mod test;
 mod trace_macros;

@@ -92,7 +91,6 @@ pub fn register_builtin_macros(resolver: &mut dyn ResolverExpand) {
     line: source_util::expand_line,
     log_syntax: log_syntax::expand_log_syntax,
     module_path: source_util::expand_mod,
-    offset_of: offset_of::expand_offset_of,
     option_env: env::expand_option_env,
     core_panic: edition_panic::expand_panic,
     std_panic: edition_panic::expand_panic,

@@ -1,99 +0,0 @@
-use rustc_ast as ast;
-use rustc_ast::ptr::P;
-use rustc_ast::token;
-use rustc_ast::tokenstream::TokenStream;
-use rustc_errors::PResult;
-use rustc_expand::base::{self, *};
-use rustc_macros::Diagnostic;
-use rustc_parse::parser::Parser;
-use rustc_span::{symbol::Ident, Span};
-
-#[derive(Diagnostic)]
-#[diag(builtin_macros_offset_of_expected_field)]
-struct ExpectedField {
-    #[primary_span]
-    span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(builtin_macros_offset_of_expected_two_args)]
-struct ExpectedTwoArgs {
-    #[primary_span]
-    span: Span,
-}
-
-fn parse_field<'a>(cx: &ExtCtxt<'a>, p: &mut Parser<'a>) -> PResult<'a, Ident> {
-    let token = p.token.uninterpolate();
-    let field = match token.kind {
-        token::Ident(name, _) => Ident::new(name, token.span),
-        token::Literal(token::Lit { kind: token::Integer, symbol, suffix: None }) => {
-            Ident::new(symbol, token.span)
-        }
-        _ => return Err(cx.create_err(ExpectedField { span: p.token.span })),
-    };
-
-    p.bump();
-
-    Ok(field)
-}
-
-fn parse_args<'a>(
-    cx: &mut ExtCtxt<'a>,
-    sp: Span,
-    tts: TokenStream,
-) -> PResult<'a, (P<ast::Ty>, P<[Ident]>)> {
-    let mut p = cx.new_parser_from_tts(tts);
-
-    let container = p.parse_ty()?;
-
-    p.expect(&token::Comma)?;
-
-    if p.eat(&token::Eof) {
-        return Err(cx.create_err(ExpectedTwoArgs { span: sp }));
-    }
-
-    let mut fields = Vec::new();
-
-    loop {
-        let field = parse_field(cx, &mut p)?;
-        fields.push(field);
-
-        if p.eat(&token::Dot) {
-            continue;
-        }
-
-        p.eat(&token::Comma);
-
-        if !p.eat(&token::Eof) {
-            return Err(cx.create_err(ExpectedTwoArgs { span: sp }));
-        }
-
-        break;
-    }
-
-    Ok((container, fields.into()))
-}
-
-pub fn expand_offset_of<'cx>(
-    cx: &'cx mut ExtCtxt<'_>,
-    sp: Span,
-    tts: TokenStream,
-) -> Box<dyn base::MacResult + 'cx> {
-    match parse_args(cx, sp, tts) {
-        Ok((container, fields)) => {
-            let expr = P(ast::Expr {
-                id: ast::DUMMY_NODE_ID,
-                kind: ast::ExprKind::OffsetOf(container, fields),
-                span: sp,
-                attrs: ast::AttrVec::new(),
-                tokens: None,
-            });
-
-            MacEager::expr(expr)
-        }
-        Err(mut err) => {
-            err.emit();
-            DummyResult::any(sp)
-        }
-    }
-}

@@ -313,6 +313,8 @@ declare_features! (
     (active, async_closure, "1.37.0", Some(62290), None),
     /// Allows async functions to be declared, implemented, and used in traits.
     (active, async_fn_in_trait, "1.66.0", Some(91611), None),
+    /// Allows builtin # foo() syntax
+    (active, builtin_syntax, "CURRENT_RUSTC_VERSION", Some(110680), None),
     /// Allows `c"foo"` literals.
     (active, c_str_literals, "CURRENT_RUSTC_VERSION", Some(105723), None),
     /// Treat `extern "C"` function as nounwind.

@@ -257,6 +257,10 @@ parse_invalid_literal_suffix_on_tuple_index = suffixes on a tuple index are inva
     .tuple_exception_line_2 = on proc macros, you'll want to use `syn::Index::from` or `proc_macro::Literal::*_unsuffixed` for code that will desugar to tuple field access
     .tuple_exception_line_3 = see issue #60210 <https://github.com/rust-lang/rust/issues/60210> for more information
 
+parse_expected_builtin_ident = expected identifier after `builtin #`
+
+parse_unknown_builtin_construct = unknown `builtin #` construct `{$name}`
+
 parse_non_string_abi_literal = non-string ABI literal
     .suggestion = specify the ABI with a string literal
 

@@ -2644,3 +2644,18 @@ pub(crate) struct MalformedCfgAttr {
     pub span: Span,
     pub sugg: &'static str,
 }
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_builtin_construct)]
+pub(crate) struct UnknownBuiltinConstruct {
+    #[primary_span]
+    pub span: Span,
+    pub name: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_builtin_ident)]
+pub(crate) struct ExpectedBuiltinIdent {
+    #[primary_span]
+    pub span: Span,
+}

@@ -1300,6 +1300,8 @@ impl<'a> Parser<'a> {
             })
         } else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
             self.parse_expr_array_or_repeat(Delimiter::Bracket)
+        } else if self.is_builtin() {
+            self.parse_expr_builtin()
         } else if self.check_path() {
             self.parse_expr_path_start()
         } else if self.check_keyword(kw::Move)

@@ -1766,6 +1768,61 @@ impl<'a> Parser<'a> {
         self.maybe_recover_from_bad_qpath(expr)
     }
 
+    /// Parse `builtin # ident(args,*)`.
+    fn parse_expr_builtin(&mut self) -> PResult<'a, P<Expr>> {
+        self.parse_builtin(|this, lo, ident| {
+            if ident.name == sym::offset_of {
+                return Ok(Some(this.parse_expr_offset_of(lo)?));
+            }
+
+            Ok(None)
+        })
+    }
+
+    pub(crate) fn parse_builtin<T>(
+        &mut self,
+        parse: impl FnOnce(&mut Parser<'a>, Span, Ident) -> PResult<'a, Option<T>>,
+    ) -> PResult<'a, T> {
+        let lo = self.token.span;
+
+        self.bump(); // `builtin`
+        self.bump(); // `#`
+
+        let Some((ident, false)) = self.token.ident() else {
+            let err = errors::ExpectedBuiltinIdent { span: self.token.span }
+                .into_diagnostic(&self.sess.span_diagnostic);
+            return Err(err);
+        };
+        self.sess.gated_spans.gate(sym::builtin_syntax, ident.span);
+        self.bump();
+
+        self.expect(&TokenKind::OpenDelim(Delimiter::Parenthesis))?;
+        let ret = if let Some(res) = parse(self, lo, ident)? {
+            Ok(res)
+        } else {
+            let err = errors::UnknownBuiltinConstruct { span: lo.to(ident.span), name: ident.name }
+                .into_diagnostic(&self.sess.span_diagnostic);
+            return Err(err);
+        };
+        self.expect(&TokenKind::CloseDelim(Delimiter::Parenthesis))?;
+
+        ret
+    }
+
+    pub(crate) fn parse_expr_offset_of(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
+        let container = self.parse_ty()?;
+        self.expect(&TokenKind::Comma)?;
+
+        let seq_sep = SeqSep { sep: Some(token::Dot), trailing_sep_allowed: false };
+        let (fields, _trailing, _recovered) = self.parse_seq_to_before_end(
+            &TokenKind::CloseDelim(Delimiter::Parenthesis),
+            seq_sep,
+            Parser::parse_field_name,
+        )?;
+        let span = lo.to(self.token.span);
+        Ok(self.mk_expr(span, ExprKind::OffsetOf(container, fields.to_vec().into())))
+    }
+
     /// Returns a string literal if the next token is a string literal.
     /// In case of error returns `Some(lit)` if the next token is a literal with a wrong kind,
     /// and returns `None` if the next token is not literal at all.

@@ -2835,6 +2892,10 @@ impl<'a> Parser<'a> {
         })
     }
 
+    pub(crate) fn is_builtin(&self) -> bool {
+        self.token.is_keyword(kw::Builtin) && self.look_ahead(1, |t| *t == token::Pound)
+    }
+
     /// Parses a `try {...}` expression (`try` token already eaten).
     fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
         let (attrs, body) = self.parse_inner_attrs_and_block()?;

@@ -265,6 +265,9 @@ impl<'a> Parser<'a> {
             // UNION ITEM
             self.bump(); // `union`
             self.parse_item_union()?
+        } else if self.is_builtin() {
+            // BUILTIN# ITEM
+            return self.parse_item_builtin();
         } else if self.eat_keyword(kw::Macro) {
             // MACROS 2.0 ITEM
             self.parse_item_decl_macro(lo)?

@@ -434,6 +437,11 @@ impl<'a> Parser<'a> {
         }
     }
 
+    fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemInfo>> {
+        // To be expanded
+        return Ok(None);
+    }
+
     /// Parses an item macro, e.g., `item!();`.
     fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
         let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`

@@ -90,7 +90,11 @@ impl<'a> Parser<'a> {
                 attrs,
                 errors::InvalidVariableDeclarationSub::UseLetNotVar,
             )?
-        } else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
+        } else if self.check_path()
+            && !self.token.is_qpath_start()
+            && !self.is_path_start_item()
+            && !self.is_builtin()
+        {
             // We have avoided contextual keywords like `union`, items with `crate` visibility,
             // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
             // that starts like a path (1 token), but it fact not a path.

@@ -95,6 +95,7 @@ symbols! {
 
         // Weak keywords, have special meaning only in specific contexts.
         Auto: "auto",
+        Builtin: "builtin",
         Catch: "catch",
         Default: "default",
         MacroRules: "macro_rules",

@@ -440,6 +441,7 @@ symbols! {
         breakpoint,
         bridge,
         bswap,
+        builtin_syntax,
         c_str,
         c_str_literals,
         c_unwind,