Remove ast:: & base:: prefixes from some builtin macros

Lieselotte 2024-02-25 22:25:26 +01:00
parent c440a5b814
commit 34eae07ee5
GPG key ID: 43A6A32F83A6F9B1 (no known key found for this signature in database)
7 changed files with 116 additions and 118 deletions
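
The change is mechanical throughout: the `use rustc_ast as ast;` alias and the glob import `use rustc_expand::base::{self, *};` are replaced with explicit item imports, and the `ast::`/`base::` prefixes are dropped at the use sites. A minimal, self-contained sketch of the pattern, where the `base` module and its items are illustrative stand-ins rather than the actual rustc definitions:

    // Stand-in for a module like rustc_expand::base.
    mod base {
        pub trait MacResult {}
        pub struct MacEager;
        impl MacResult for MacEager {}
    }

    // Before: keep the module handle plus a glob, refer to items through `base::`.
    mod before {
        use crate::base::{self, *};

        pub fn expand() -> Box<dyn base::MacResult + 'static> {
            Box::new(MacEager)
        }
    }

    // After: import exactly what is used and drop the prefix.
    mod after {
        use crate::base::{MacEager, MacResult};

        pub fn expand() -> Box<dyn MacResult + 'static> {
            Box::new(MacEager)
        }
    }

    fn main() {
        let (_a, _b) = (before::expand(), after::expand());
    }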

View file

@@ -8,14 +8,14 @@ use rustc_ast::token;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_attr as attr;
 use rustc_errors::PResult;
-use rustc_expand::base::{self, *};
+use rustc_expand::base::{DummyResult, ExtCtxt, MacEager, MacResult};
 use rustc_span::Span;
 pub fn expand_cfg(
     cx: &mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'static> {
+) -> Box<dyn MacResult + 'static> {
     let sp = cx.with_def_site_ctxt(sp);
     match parse_cfg(cx, sp, tts) {

View file

@@ -1,14 +1,14 @@
 // The compiler code necessary to support the compile_error! extension.
 use rustc_ast::tokenstream::TokenStream;
-use rustc_expand::base::{self, *};
+use rustc_expand::base::{get_single_str_from_tts, DummyResult, ExtCtxt, MacResult};
 use rustc_span::Span;
 pub fn expand_compile_error<'cx>(
     cx: &'cx mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'cx> {
+) -> Box<dyn MacResult + 'cx> {
     let var = match get_single_str_from_tts(cx, sp, tts, "compile_error!") {
         Ok(var) => var,
         Err(guar) => return DummyResult::any(sp, guar),

View file

@@ -1,17 +1,17 @@
-use rustc_ast as ast;
 use rustc_ast::tokenstream::TokenStream;
-use rustc_expand::base::{self, DummyResult};
+use rustc_ast::{ExprKind, LitKind, UnOp};
+use rustc_expand::base::{get_exprs_from_tts, DummyResult, ExtCtxt, MacEager, MacResult};
 use rustc_session::errors::report_lit_error;
 use rustc_span::symbol::Symbol;
 use crate::errors;
 pub fn expand_concat(
-    cx: &mut base::ExtCtxt<'_>,
+    cx: &mut ExtCtxt<'_>,
     sp: rustc_span::Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'static> {
-    let es = match base::get_exprs_from_tts(cx, tts) {
+) -> Box<dyn MacResult + 'static> {
+    let es = match get_exprs_from_tts(cx, tts) {
         Ok(es) => es,
         Err(guar) => return DummyResult::any(sp, guar),
     };
@@ -20,26 +20,26 @@ pub fn expand_concat(
     let mut guar = None;
     for e in es {
         match e.kind {
-            ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
-                Ok(ast::LitKind::Str(s, _) | ast::LitKind::Float(s, _)) => {
+            ExprKind::Lit(token_lit) => match LitKind::from_token_lit(token_lit) {
+                Ok(LitKind::Str(s, _) | LitKind::Float(s, _)) => {
                     accumulator.push_str(s.as_str());
                 }
-                Ok(ast::LitKind::Char(c)) => {
+                Ok(LitKind::Char(c)) => {
                     accumulator.push(c);
                 }
-                Ok(ast::LitKind::Int(i, _)) => {
+                Ok(LitKind::Int(i, _)) => {
                     accumulator.push_str(&i.to_string());
                 }
-                Ok(ast::LitKind::Bool(b)) => {
+                Ok(LitKind::Bool(b)) => {
                     accumulator.push_str(&b.to_string());
                 }
-                Ok(ast::LitKind::CStr(..)) => {
+                Ok(LitKind::CStr(..)) => {
                     guar = Some(cx.dcx().emit_err(errors::ConcatCStrLit { span: e.span }));
                 }
-                Ok(ast::LitKind::Byte(..) | ast::LitKind::ByteStr(..)) => {
+                Ok(LitKind::Byte(..) | LitKind::ByteStr(..)) => {
                     guar = Some(cx.dcx().emit_err(errors::ConcatBytestr { span: e.span }));
                 }
-                Ok(ast::LitKind::Err(guarantee)) => {
+                Ok(LitKind::Err(guarantee)) => {
                     guar = Some(guarantee);
                 }
                 Err(err) => {
@@ -47,25 +47,23 @@ pub fn expand_concat(
                 }
             },
             // We also want to allow negative numeric literals.
-            ast::ExprKind::Unary(ast::UnOp::Neg, ref expr)
-                if let ast::ExprKind::Lit(token_lit) = expr.kind =>
-            {
-                match ast::LitKind::from_token_lit(token_lit) {
-                    Ok(ast::LitKind::Int(i, _)) => accumulator.push_str(&format!("-{i}")),
-                    Ok(ast::LitKind::Float(f, _)) => accumulator.push_str(&format!("-{f}")),
+            ExprKind::Unary(UnOp::Neg, ref expr) if let ExprKind::Lit(token_lit) = expr.kind => {
+                match LitKind::from_token_lit(token_lit) {
+                    Ok(LitKind::Int(i, _)) => accumulator.push_str(&format!("-{i}")),
+                    Ok(LitKind::Float(f, _)) => accumulator.push_str(&format!("-{f}")),
                     Err(err) => {
                         guar = Some(report_lit_error(&cx.sess.parse_sess, err, token_lit, e.span));
                     }
                     _ => missing_literal.push(e.span),
                 }
             }
-            ast::ExprKind::IncludedBytes(..) => {
+            ExprKind::IncludedBytes(..) => {
                 cx.dcx().emit_err(errors::ConcatBytestr { span: e.span });
             }
-            ast::ExprKind::Err(guarantee) => {
+            ExprKind::Err(guarantee) => {
                 guar = Some(guarantee);
             }
-            ast::ExprKind::Dummy => cx.dcx().span_bug(e.span, "concatenating `ExprKind::Dummy`"),
+            ExprKind::Dummy => cx.dcx().span_bug(e.span, "concatenating `ExprKind::Dummy`"),
             _ => {
                 missing_literal.push(e.span);
             }
@@ -79,5 +77,5 @@ pub fn expand_concat(
         return DummyResult::any(sp, guar);
     }
     let sp = cx.with_def_site_ctxt(sp);
-    base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator)))
+    MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator)))
 }

View file

@@ -1,6 +1,5 @@
-use rustc_ast as ast;
-use rustc_ast::{ptr::P, tokenstream::TokenStream};
-use rustc_expand::base::{self, DummyResult};
+use rustc_ast::{ptr::P, token, tokenstream::TokenStream, ExprKind, LitIntType, LitKind, UintTy};
+use rustc_expand::base::{get_exprs_from_tts, DummyResult, ExtCtxt, MacEager, MacResult};
 use rustc_session::errors::report_lit_error;
 use rustc_span::{ErrorGuaranteed, Span};
@@ -8,8 +7,8 @@ use crate::errors;
 /// Emits errors for literal expressions that are invalid inside and outside of an array.
 fn invalid_type_err(
-    cx: &mut base::ExtCtxt<'_>,
-    token_lit: ast::token::Lit,
+    cx: &mut ExtCtxt<'_>,
+    token_lit: token::Lit,
     span: Span,
     is_nested: bool,
 ) -> ErrorGuaranteed {
@@ -18,18 +17,18 @@ fn invalid_type_err(
     };
     let snippet = cx.sess.source_map().span_to_snippet(span).ok();
     let dcx = cx.dcx();
-    match ast::LitKind::from_token_lit(token_lit) {
-        Ok(ast::LitKind::CStr(_, _)) => {
+    match LitKind::from_token_lit(token_lit) {
+        Ok(LitKind::CStr(_, _)) => {
             // Avoid ambiguity in handling of terminal `NUL` by refusing to
             // concatenate C string literals as bytes.
             dcx.emit_err(errors::ConcatCStrLit { span })
         }
-        Ok(ast::LitKind::Char(_)) => {
+        Ok(LitKind::Char(_)) => {
             let sugg =
                 snippet.map(|snippet| ConcatBytesInvalidSuggestion::CharLit { span, snippet });
             dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "character", sugg })
         }
-        Ok(ast::LitKind::Str(_, _)) => {
+        Ok(LitKind::Str(_, _)) => {
             // suggestion would be invalid if we are nested
             let sugg = if !is_nested {
                 snippet.map(|snippet| ConcatBytesInvalidSuggestion::StrLit { span, snippet })
@@ -38,27 +37,24 @@ fn invalid_type_err(
             };
             dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "string", sugg })
         }
-        Ok(ast::LitKind::Float(_, _)) => {
+        Ok(LitKind::Float(_, _)) => {
             dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "float", sugg: None })
         }
-        Ok(ast::LitKind::Bool(_)) => {
+        Ok(LitKind::Bool(_)) => {
             dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "boolean", sugg: None })
         }
-        Ok(ast::LitKind::Int(_, _)) if !is_nested => {
+        Ok(LitKind::Int(_, _)) if !is_nested => {
             let sugg =
-                snippet.map(|snippet| ConcatBytesInvalidSuggestion::IntLit { span: span, snippet });
+                snippet.map(|snippet| ConcatBytesInvalidSuggestion::IntLit { span, snippet });
             dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "numeric", sugg })
         }
-        Ok(ast::LitKind::Int(
-            val,
-            ast::LitIntType::Unsuffixed | ast::LitIntType::Unsigned(ast::UintTy::U8),
-        )) => {
+        Ok(LitKind::Int(val, LitIntType::Unsuffixed | LitIntType::Unsigned(UintTy::U8))) => {
             assert!(val.get() > u8::MAX.into()); // must be an error
             dcx.emit_err(ConcatBytesOob { span })
         }
-        Ok(ast::LitKind::Int(_, _)) => dcx.emit_err(ConcatBytesNonU8 { span }),
-        Ok(ast::LitKind::ByteStr(..) | ast::LitKind::Byte(_)) => unreachable!(),
-        Ok(ast::LitKind::Err(guar)) => guar,
+        Ok(LitKind::Int(_, _)) => dcx.emit_err(ConcatBytesNonU8 { span }),
+        Ok(LitKind::ByteStr(..) | LitKind::Byte(_)) => unreachable!(),
+        Ok(LitKind::Err(guar)) => guar,
         Err(err) => report_lit_error(&cx.sess.parse_sess, err, token_lit, span),
     }
 }
@@ -68,7 +64,7 @@ fn invalid_type_err(
 /// Otherwise, returns `None`, and either pushes the `expr`'s span to `missing_literals` or
 /// updates `guar` accordingly.
 fn handle_array_element(
-    cx: &mut base::ExtCtxt<'_>,
+    cx: &mut ExtCtxt<'_>,
     guar: &mut Option<ErrorGuaranteed>,
     missing_literals: &mut Vec<rustc_span::Span>,
     expr: &P<rustc_ast::Expr>,
@@ -76,16 +72,16 @@ fn handle_array_element(
     let dcx = cx.dcx();
     match expr.kind {
-        ast::ExprKind::Lit(token_lit) => {
-            match ast::LitKind::from_token_lit(token_lit) {
-                Ok(ast::LitKind::Int(
+        ExprKind::Lit(token_lit) => {
+            match LitKind::from_token_lit(token_lit) {
+                Ok(LitKind::Int(
                     val,
-                    ast::LitIntType::Unsuffixed | ast::LitIntType::Unsigned(ast::UintTy::U8),
+                    LitIntType::Unsuffixed | LitIntType::Unsigned(UintTy::U8),
                 )) if let Ok(val) = u8::try_from(val.get()) => {
                     return Some(val);
                 }
-                Ok(ast::LitKind::Byte(val)) => return Some(val),
-                Ok(ast::LitKind::ByteStr(..)) => {
+                Ok(LitKind::Byte(val)) => return Some(val),
+                Ok(LitKind::ByteStr(..)) => {
                     guar.get_or_insert_with(|| {
                         dcx.emit_err(errors::ConcatBytesArray { span: expr.span, bytestr: true })
                     });
@@ -95,12 +91,12 @@ fn handle_array_element(
                 }
             };
         }
-        ast::ExprKind::Array(_) | ast::ExprKind::Repeat(_, _) => {
+        ExprKind::Array(_) | ExprKind::Repeat(_, _) => {
             guar.get_or_insert_with(|| {
                 dcx.emit_err(errors::ConcatBytesArray { span: expr.span, bytestr: false })
             });
         }
-        ast::ExprKind::IncludedBytes(..) => {
+        ExprKind::IncludedBytes(..) => {
             guar.get_or_insert_with(|| {
                 dcx.emit_err(errors::ConcatBytesArray { span: expr.span, bytestr: false })
             });
@@ -112,11 +108,11 @@ fn handle_array_element(
 }
 pub fn expand_concat_bytes(
-    cx: &mut base::ExtCtxt<'_>,
-    sp: rustc_span::Span,
+    cx: &mut ExtCtxt<'_>,
+    sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'static> {
-    let es = match base::get_exprs_from_tts(cx, tts) {
+) -> Box<dyn MacResult + 'static> {
+    let es = match get_exprs_from_tts(cx, tts) {
         Ok(es) => es,
         Err(guar) => return DummyResult::any(sp, guar),
     };
@@ -125,7 +121,7 @@ pub fn expand_concat_bytes(
     let mut guar = None;
     for e in es {
         match &e.kind {
-            ast::ExprKind::Array(exprs) => {
+            ExprKind::Array(exprs) => {
                 for expr in exprs {
                     if let Some(elem) =
                         handle_array_element(cx, &mut guar, &mut missing_literals, expr)
@@ -134,10 +130,9 @@ pub fn expand_concat_bytes(
                     }
                 }
             }
-            ast::ExprKind::Repeat(expr, count) => {
-                if let ast::ExprKind::Lit(token_lit) = count.value.kind
-                    && let Ok(ast::LitKind::Int(count_val, _)) =
-                        ast::LitKind::from_token_lit(token_lit)
+            ExprKind::Repeat(expr, count) => {
+                if let ExprKind::Lit(token_lit) = count.value.kind
+                    && let Ok(LitKind::Int(count_val, _)) = LitKind::from_token_lit(token_lit)
                 {
                     if let Some(elem) =
                         handle_array_element(cx, &mut guar, &mut missing_literals, expr)
@@ -152,24 +147,24 @@ pub fn expand_concat_bytes(
                     );
                 }
             }
-            &ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
-                Ok(ast::LitKind::Byte(val)) => {
+            &ExprKind::Lit(token_lit) => match LitKind::from_token_lit(token_lit) {
+                Ok(LitKind::Byte(val)) => {
                     accumulator.push(val);
                 }
-                Ok(ast::LitKind::ByteStr(ref bytes, _)) => {
+                Ok(LitKind::ByteStr(ref bytes, _)) => {
                     accumulator.extend_from_slice(bytes);
                 }
                 _ => {
                     guar.get_or_insert_with(|| invalid_type_err(cx, token_lit, e.span, false));
                 }
             },
-            ast::ExprKind::IncludedBytes(bytes) => {
+            ExprKind::IncludedBytes(bytes) => {
                 accumulator.extend_from_slice(bytes);
             }
-            ast::ExprKind::Err(guarantee) => {
+            ExprKind::Err(guarantee) => {
                 guar = Some(*guarantee);
             }
-            ast::ExprKind::Dummy => cx.dcx().span_bug(e.span, "concatenating `ExprKind::Dummy`"),
+            ExprKind::Dummy => cx.dcx().span_bug(e.span, "concatenating `ExprKind::Dummy`"),
             _ => {
                 missing_literals.push(e.span);
             }
@@ -177,10 +172,10 @@ pub fn expand_concat_bytes(
     }
     if !missing_literals.is_empty() {
         let guar = cx.dcx().emit_err(errors::ConcatBytesMissingLiteral { spans: missing_literals });
-        return base::MacEager::expr(DummyResult::raw_expr(sp, Some(guar)));
+        return MacEager::expr(DummyResult::raw_expr(sp, Some(guar)));
     } else if let Some(guar) = guar {
-        return base::MacEager::expr(DummyResult::raw_expr(sp, Some(guar)));
+        return MacEager::expr(DummyResult::raw_expr(sp, Some(guar)));
     }
     let sp = cx.with_def_site_ctxt(sp);
-    base::MacEager::expr(cx.expr_byte_str(sp, accumulator))
+    MacEager::expr(cx.expr_byte_str(sp, accumulator))
 }

View file

@@ -1,8 +1,8 @@
-use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Token};
 use rustc_ast::tokenstream::{TokenStream, TokenTree};
-use rustc_expand::base::{self, *};
+use rustc_ast::{AttrVec, Expr, ExprKind, Path, Ty, TyKind, DUMMY_NODE_ID};
+use rustc_expand::base::{DummyResult, ExtCtxt, MacResult};
 use rustc_span::symbol::{Ident, Symbol};
 use rustc_span::Span;
@@ -12,7 +12,7 @@ pub fn expand_concat_idents<'cx>(
     cx: &'cx mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'cx> {
+) -> Box<dyn MacResult + 'cx> {
     if tts.is_empty() {
         let guar = cx.dcx().emit_err(errors::ConcatIdentsMissingArgs { span: sp });
         return DummyResult::any(sp, guar);
@@ -47,21 +47,21 @@ pub fn expand_concat_idents<'cx>(
         ident: Ident,
     }
-    impl base::MacResult for ConcatIdentsResult {
-        fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
-            Some(P(ast::Expr {
-                id: ast::DUMMY_NODE_ID,
-                kind: ast::ExprKind::Path(None, ast::Path::from_ident(self.ident)),
+    impl MacResult for ConcatIdentsResult {
+        fn make_expr(self: Box<Self>) -> Option<P<Expr>> {
+            Some(P(Expr {
+                id: DUMMY_NODE_ID,
+                kind: ExprKind::Path(None, Path::from_ident(self.ident)),
                 span: self.ident.span,
-                attrs: ast::AttrVec::new(),
+                attrs: AttrVec::new(),
                 tokens: None,
             }))
         }
-        fn make_ty(self: Box<Self>) -> Option<P<ast::Ty>> {
-            Some(P(ast::Ty {
-                id: ast::DUMMY_NODE_ID,
-                kind: ast::TyKind::Path(None, ast::Path::from_ident(self.ident)),
+        fn make_ty(self: Box<Self>) -> Option<P<Ty>> {
+            Some(P(Ty {
+                id: DUMMY_NODE_ID,
+                kind: TyKind::Path(None, Path::from_ident(self.ident)),
                 span: self.ident.span,
                 tokens: None,
             }))

View file

@@ -3,9 +3,13 @@
 // interface.
 //
+use rustc_ast::token::{self, LitKind};
 use rustc_ast::tokenstream::TokenStream;
-use rustc_ast::{self as ast, AstDeref, GenericArg};
-use rustc_expand::base::{self, *};
+use rustc_ast::{AstDeref, ExprKind, GenericArg, Mutability};
+use rustc_expand::base::{
+    expr_to_string, get_exprs_from_tts, get_single_str_from_tts, DummyResult, ExtCtxt, MacEager,
+    MacResult,
+};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use rustc_span::Span;
 use std::env;
@@ -27,7 +31,7 @@ pub fn expand_option_env<'cx>(
     cx: &'cx mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'cx> {
+) -> Box<dyn MacResult + 'cx> {
     let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") {
         Ok(var) => var,
         Err(guar) => return DummyResult::any(sp, guar),
@@ -47,7 +51,7 @@ pub fn expand_option_env<'cx>(
                     sp,
                     cx.ty_ident(sp, Ident::new(sym::str, sp)),
                     Some(lt),
-                    ast::Mutability::Not,
+                    Mutability::Not,
                 ))],
             ))
         }
@@ -64,7 +68,7 @@ pub fn expand_env<'cx>(
     cx: &'cx mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'cx> {
+) -> Box<dyn MacResult + 'cx> {
     let mut exprs = match get_exprs_from_tts(cx, tts) {
         Ok(exprs) if exprs.is_empty() || exprs.len() > 2 => {
             let guar = cx.dcx().emit_err(errors::EnvTakesArgs { span: sp });
@@ -93,10 +97,8 @@ pub fn expand_env<'cx>(
     cx.sess.parse_sess.env_depinfo.borrow_mut().insert((var, value));
     let e = match value {
         None => {
-            let ast::ExprKind::Lit(ast::token::Lit {
-                kind: ast::token::LitKind::Str | ast::token::LitKind::StrRaw(..),
-                symbol,
-                ..
+            let ExprKind::Lit(token::Lit {
+                kind: LitKind::Str | LitKind::StrRaw(..), symbol, ..
             }) = &var_expr.kind
             else {
                 unreachable!("`expr_to_string` ensures this is a string lit")

View file

@@ -3,7 +3,10 @@ use rustc_ast::ptr::P;
 use rustc_ast::token;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast_pretty::pprust;
-use rustc_expand::base::{self, *};
+use rustc_expand::base::{
+    check_zero_tts, get_single_str_from_tts, parse_expr, resolve_path, DummyResult, ExtCtxt,
+    MacEager, MacResult,
+};
 use rustc_expand::module::DirOwnership;
 use rustc_parse::new_parser_from_file;
 use rustc_parse::parser::{ForceCollect, Parser};
@@ -23,14 +26,14 @@ pub fn expand_line(
     cx: &mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'static> {
+) -> Box<dyn MacResult + 'static> {
     let sp = cx.with_def_site_ctxt(sp);
-    base::check_zero_tts(cx, sp, tts, "line!");
+    check_zero_tts(cx, sp, tts, "line!");
     let topmost = cx.expansion_cause().unwrap_or(sp);
     let loc = cx.source_map().lookup_char_pos(topmost.lo());
-    base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
+    MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
 }
 /* column!(): expands to the current column number */
@@ -38,14 +41,14 @@ pub fn expand_column(
     cx: &mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'static> {
+) -> Box<dyn MacResult + 'static> {
     let sp = cx.with_def_site_ctxt(sp);
-    base::check_zero_tts(cx, sp, tts, "column!");
+    check_zero_tts(cx, sp, tts, "column!");
     let topmost = cx.expansion_cause().unwrap_or(sp);
     let loc = cx.source_map().lookup_char_pos(topmost.lo());
-    base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32 + 1))
+    MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32 + 1))
 }
 /// file!(): expands to the current filename */
@@ -55,15 +58,15 @@ pub fn expand_file(
     cx: &mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'static> {
+) -> Box<dyn MacResult + 'static> {
     let sp = cx.with_def_site_ctxt(sp);
-    base::check_zero_tts(cx, sp, tts, "file!");
+    check_zero_tts(cx, sp, tts, "file!");
     let topmost = cx.expansion_cause().unwrap_or(sp);
     let loc = cx.source_map().lookup_char_pos(topmost.lo());
     use rustc_session::{config::RemapPathScopeComponents, RemapFileNameExt};
-    base::MacEager::expr(cx.expr_str(
+    MacEager::expr(cx.expr_str(
         topmost,
         Symbol::intern(
             &loc.file.name.for_scope(cx.sess, RemapPathScopeComponents::MACRO).to_string_lossy(),
@@ -75,23 +78,23 @@ pub fn expand_stringify(
     cx: &mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'static> {
+) -> Box<dyn MacResult + 'static> {
     let sp = cx.with_def_site_ctxt(sp);
     let s = pprust::tts_to_string(&tts);
-    base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
+    MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
 }
 pub fn expand_mod(
     cx: &mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'static> {
+) -> Box<dyn MacResult + 'static> {
     let sp = cx.with_def_site_ctxt(sp);
-    base::check_zero_tts(cx, sp, tts, "module_path!");
+    check_zero_tts(cx, sp, tts, "module_path!");
     let mod_path = &cx.current_expansion.module.mod_path;
     let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::");
-    base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&string)))
+    MacEager::expr(cx.expr_str(sp, Symbol::intern(&string)))
 }
 /// include! : parse the given file as an expr
@@ -101,7 +104,7 @@ pub fn expand_include<'cx>(
     cx: &'cx mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'cx> {
+) -> Box<dyn MacResult + 'cx> {
     let sp = cx.with_def_site_ctxt(sp);
     let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
         Ok(file) => file,
@@ -129,9 +132,9 @@ pub fn expand_include<'cx>(
         p: Parser<'a>,
         node_id: ast::NodeId,
     }
-    impl<'a> base::MacResult for ExpandResult<'a> {
+    impl<'a> MacResult for ExpandResult<'a> {
         fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
-            let expr = base::parse_expr(&mut self.p).ok()?;
+            let expr = parse_expr(&mut self.p).ok()?;
             if self.p.token != token::Eof {
                 self.p.sess.buffer_lint(
                     INCOMPLETE_INCLUDE,
@@ -175,7 +178,7 @@ pub fn expand_include_str(
     cx: &mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'static> {
+) -> Box<dyn MacResult + 'static> {
     let sp = cx.with_def_site_ctxt(sp);
     let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
         Ok(file) => file,
@@ -192,7 +195,7 @@ pub fn expand_include_str(
         Ok(bytes) => match std::str::from_utf8(&bytes) {
             Ok(src) => {
                 let interned_src = Symbol::intern(src);
-                base::MacEager::expr(cx.expr_str(sp, interned_src))
+                MacEager::expr(cx.expr_str(sp, interned_src))
             }
             Err(_) => {
                 let guar = cx.dcx().span_err(sp, format!("{} wasn't a utf-8 file", file.display()));
@@ -210,7 +213,7 @@ pub fn expand_include_bytes(
     cx: &mut ExtCtxt<'_>,
     sp: Span,
     tts: TokenStream,
-) -> Box<dyn base::MacResult + 'static> {
+) -> Box<dyn MacResult + 'static> {
     let sp = cx.with_def_site_ctxt(sp);
     let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
         Ok(file) => file,
@@ -226,7 +229,7 @@ pub fn expand_include_bytes(
     match cx.source_map().load_binary_file(&file) {
         Ok(bytes) => {
             let expr = cx.expr(sp, ast::ExprKind::IncludedBytes(bytes));
-            base::MacEager::expr(expr)
+            MacEager::expr(expr)
         }
         Err(e) => {
             let guar = cx.dcx().span_err(sp, format!("couldn't read {}: {}", file.display(), e));