add new enum ast::StrStyle as field to ast::lit_str
For the benefit of the pretty printer we want to keep track of how string literals in the ast were originally represented in the source code. This commit changes parser functions so they don't extract strings from the token stream without at least also returning what style of string literal it was. This is stored in the resulting ast node for string literals, obviously, for the package id in `extern mod = r"package id"` view items, and for the inline asm in `asm!()` invocations. For `asm!()`'s other arguments or for `extern "Rust" fn()` items, I just drop the style of string, because it seemed disproportionately cumbersome to thread that information through the string processing that happens with those string literals, given the limited advantage raw string literals would provide in these positions. The other syntax extensions don't seem to store passed string literals in the ast, so they also discard the style of strings they parse.
This commit is contained in:
parent
9787872553
commit
9d7b130041
25 changed files with 92 additions and 73 deletions
|
@ -407,7 +407,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
|
||||||
debug2!("encoding {}", ast_util::path_name_i(path));
|
debug2!("encoding {}", ast_util::path_name_i(path));
|
||||||
|
|
||||||
let name_lit: ast::lit =
|
let name_lit: ast::lit =
|
||||||
nospan(ast::lit_str(ast_util::path_name_i(path).to_managed()));
|
nospan(ast::lit_str(ast_util::path_name_i(path).to_managed(), ast::CookedStr));
|
||||||
|
|
||||||
let name_expr = @ast::Expr {
|
let name_expr = @ast::Expr {
|
||||||
id: ast::DUMMY_NODE_ID,
|
id: ast::DUMMY_NODE_ID,
|
||||||
|
|
|
@ -142,7 +142,7 @@ fn visit_view_item(e: @mut Env, i: &ast::view_item) {
|
||||||
let ident = token::ident_to_str(&ident);
|
let ident = token::ident_to_str(&ident);
|
||||||
let meta_items = match path_opt {
|
let meta_items = match path_opt {
|
||||||
None => meta_items.clone(),
|
None => meta_items.clone(),
|
||||||
Some(p) => {
|
Some((p, _path_str_style)) => {
|
||||||
let p_path = Path(p);
|
let p_path = Path(p);
|
||||||
match p_path.filestem() {
|
match p_path.filestem() {
|
||||||
Some(s) =>
|
Some(s) =>
|
||||||
|
|
|
@ -1446,7 +1446,7 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @MetaItem) {
|
||||||
}
|
}
|
||||||
MetaNameValue(name, value) => {
|
MetaNameValue(name, value) => {
|
||||||
match value.node {
|
match value.node {
|
||||||
lit_str(value) => {
|
lit_str(value, _) => {
|
||||||
ebml_w.start_tag(tag_meta_item_name_value);
|
ebml_w.start_tag(tag_meta_item_name_value);
|
||||||
ebml_w.start_tag(tag_meta_item_name);
|
ebml_w.start_tag(tag_meta_item_name);
|
||||||
ebml_w.writer.write(name.as_bytes());
|
ebml_w.writer.write(name.as_bytes());
|
||||||
|
|
|
@ -86,7 +86,7 @@ pub fn check_pat(v: &mut CheckCrateVisitor, p: @Pat, _is_const: bool) {
|
||||||
match e.node {
|
match e.node {
|
||||||
ExprVstore(
|
ExprVstore(
|
||||||
@Expr { node: ExprLit(@codemap::Spanned {
|
@Expr { node: ExprLit(@codemap::Spanned {
|
||||||
node: lit_str(_),
|
node: lit_str(*),
|
||||||
_}),
|
_}),
|
||||||
_ },
|
_ },
|
||||||
ExprVstoreUniq
|
ExprVstoreUniq
|
||||||
|
@ -120,7 +120,7 @@ pub fn check_expr(v: &mut CheckCrateVisitor,
|
||||||
"disallowed operator in constant expression");
|
"disallowed operator in constant expression");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
ExprLit(@codemap::Spanned {node: lit_str(_), _}) => { }
|
ExprLit(@codemap::Spanned {node: lit_str(*), _}) => { }
|
||||||
ExprBinary(*) | ExprUnary(*) => {
|
ExprBinary(*) | ExprUnary(*) => {
|
||||||
if method_map.contains_key(&e.id) {
|
if method_map.contains_key(&e.id) {
|
||||||
sess.span_err(e.span, "user-defined operators are not \
|
sess.span_err(e.span, "user-defined operators are not \
|
||||||
|
|
|
@ -475,7 +475,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
|
||||||
|
|
||||||
pub fn lit_to_const(lit: &lit) -> const_val {
|
pub fn lit_to_const(lit: &lit) -> const_val {
|
||||||
match lit.node {
|
match lit.node {
|
||||||
lit_str(s) => const_str(s),
|
lit_str(s, _) => const_str(s),
|
||||||
lit_char(n) => const_uint(n as u64),
|
lit_char(n) => const_uint(n as u64),
|
||||||
lit_int(n, _) => const_int(n),
|
lit_int(n, _) => const_int(n),
|
||||||
lit_uint(n, _) => const_uint(n),
|
lit_uint(n, _) => const_uint(n),
|
||||||
|
|
|
@ -71,7 +71,7 @@ pub fn const_lit(cx: &mut CrateContext, e: &ast::Expr, lit: ast::lit)
|
||||||
}
|
}
|
||||||
ast::lit_bool(b) => C_bool(b),
|
ast::lit_bool(b) => C_bool(b),
|
||||||
ast::lit_nil => C_nil(),
|
ast::lit_nil => C_nil(),
|
||||||
ast::lit_str(s) => C_estr_slice(cx, s)
|
ast::lit_str(s, _) => C_estr_slice(cx, s)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -705,7 +705,7 @@ fn trans_rvalue_dps_unadjusted(bcx: @mut Block, expr: &ast::Expr,
|
||||||
args.iter().enumerate().map(|(i, arg)| (i, *arg)).collect();
|
args.iter().enumerate().map(|(i, arg)| (i, *arg)).collect();
|
||||||
return trans_adt(bcx, repr, 0, numbered_fields, None, dest);
|
return trans_adt(bcx, repr, 0, numbered_fields, None, dest);
|
||||||
}
|
}
|
||||||
ast::ExprLit(@codemap::Spanned {node: ast::lit_str(s), _}) => {
|
ast::ExprLit(@codemap::Spanned {node: ast::lit_str(s, _), _}) => {
|
||||||
return tvec::trans_lit_str(bcx, expr, s, dest);
|
return tvec::trans_lit_str(bcx, expr, s, dest);
|
||||||
}
|
}
|
||||||
ast::ExprVstore(contents, ast::ExprVstoreSlice) |
|
ast::ExprVstore(contents, ast::ExprVstoreSlice) |
|
||||||
|
|
|
@ -205,7 +205,7 @@ pub fn trans_slice_vstore(bcx: @mut Block,
|
||||||
|
|
||||||
// Handle the &"..." case:
|
// Handle the &"..." case:
|
||||||
match content_expr.node {
|
match content_expr.node {
|
||||||
ast::ExprLit(@codemap::Spanned {node: ast::lit_str(s), span: _}) => {
|
ast::ExprLit(@codemap::Spanned {node: ast::lit_str(s, _), span: _}) => {
|
||||||
return trans_lit_str(bcx, content_expr, s, dest);
|
return trans_lit_str(bcx, content_expr, s, dest);
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
|
@ -296,7 +296,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: @mut Block, heap: heap, vstore_expr: &a
|
||||||
heap_exchange => {
|
heap_exchange => {
|
||||||
match content_expr.node {
|
match content_expr.node {
|
||||||
ast::ExprLit(@codemap::Spanned {
|
ast::ExprLit(@codemap::Spanned {
|
||||||
node: ast::lit_str(s), span
|
node: ast::lit_str(s, _), span
|
||||||
}) => {
|
}) => {
|
||||||
let llptrval = C_cstr(bcx.ccx(), s);
|
let llptrval = C_cstr(bcx.ccx(), s);
|
||||||
let llptrval = PointerCast(bcx, llptrval, Type::i8p());
|
let llptrval = PointerCast(bcx, llptrval, Type::i8p());
|
||||||
|
@ -357,7 +357,7 @@ pub fn write_content(bcx: @mut Block,
|
||||||
let _indenter = indenter();
|
let _indenter = indenter();
|
||||||
|
|
||||||
match content_expr.node {
|
match content_expr.node {
|
||||||
ast::ExprLit(@codemap::Spanned { node: ast::lit_str(s), _ }) => {
|
ast::ExprLit(@codemap::Spanned { node: ast::lit_str(s, _), _ }) => {
|
||||||
match dest {
|
match dest {
|
||||||
Ignore => {
|
Ignore => {
|
||||||
return bcx;
|
return bcx;
|
||||||
|
@ -490,7 +490,7 @@ pub fn elements_required(bcx: @mut Block, content_expr: &ast::Expr) -> uint {
|
||||||
//! Figure out the number of elements we need to store this content
|
//! Figure out the number of elements we need to store this content
|
||||||
|
|
||||||
match content_expr.node {
|
match content_expr.node {
|
||||||
ast::ExprLit(@codemap::Spanned { node: ast::lit_str(s), _ }) => {
|
ast::ExprLit(@codemap::Spanned { node: ast::lit_str(s, _), _ }) => {
|
||||||
s.len()
|
s.len()
|
||||||
},
|
},
|
||||||
ast::ExprVec(ref es, _) => es.len(),
|
ast::ExprVec(ref es, _) => es.len(),
|
||||||
|
|
|
@ -3266,7 +3266,7 @@ pub fn expr_kind(tcx: ctxt,
|
||||||
ast::ExprDoBody(*) |
|
ast::ExprDoBody(*) |
|
||||||
ast::ExprBlock(*) |
|
ast::ExprBlock(*) |
|
||||||
ast::ExprRepeat(*) |
|
ast::ExprRepeat(*) |
|
||||||
ast::ExprLit(@codemap::Spanned {node: lit_str(_), _}) |
|
ast::ExprLit(@codemap::Spanned {node: lit_str(*), _}) |
|
||||||
ast::ExprVstore(_, ast::ExprVstoreSlice) |
|
ast::ExprVstore(_, ast::ExprVstoreSlice) |
|
||||||
ast::ExprVstore(_, ast::ExprVstoreMutSlice) |
|
ast::ExprVstore(_, ast::ExprVstoreMutSlice) |
|
||||||
ast::ExprVec(*) => {
|
ast::ExprVec(*) => {
|
||||||
|
|
|
@ -2259,7 +2259,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
|
||||||
match expr.node {
|
match expr.node {
|
||||||
ast::ExprVstore(ev, vst) => {
|
ast::ExprVstore(ev, vst) => {
|
||||||
let typ = match ev.node {
|
let typ = match ev.node {
|
||||||
ast::ExprLit(@codemap::Spanned { node: ast::lit_str(_), _ }) => {
|
ast::ExprLit(@codemap::Spanned { node: ast::lit_str(*), _ }) => {
|
||||||
let tt = ast_expr_vstore_to_vstore(fcx, ev, vst);
|
let tt = ast_expr_vstore_to_vstore(fcx, ev, vst);
|
||||||
ty::mk_estr(tcx, tt)
|
ty::mk_estr(tcx, tt)
|
||||||
}
|
}
|
||||||
|
|
|
@ -1008,7 +1008,7 @@ impl Clean<ViewItemInner> for ast::view_item_ {
|
||||||
fn clean(&self) -> ViewItemInner {
|
fn clean(&self) -> ViewItemInner {
|
||||||
match self {
|
match self {
|
||||||
&ast::view_item_extern_mod(ref i, ref p, ref mi, ref id) =>
|
&ast::view_item_extern_mod(ref i, ref p, ref mi, ref id) =>
|
||||||
ExternMod(i.clean(), p.map(|x| x.to_owned()), mi.clean(), *id),
|
ExternMod(i.clean(), p.map(|&(ref x, _)| x.to_owned()), mi.clean(), *id),
|
||||||
&ast::view_item_use(ref vp) => Import(vp.clean())
|
&ast::view_item_use(ref vp) => Import(vp.clean())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1114,7 +1114,7 @@ impl ToSource for syntax::codemap::Span {
|
||||||
|
|
||||||
fn lit_to_str(lit: &ast::lit) -> ~str {
|
fn lit_to_str(lit: &ast::lit) -> ~str {
|
||||||
match lit.node {
|
match lit.node {
|
||||||
ast::lit_str(st) => st.to_owned(),
|
ast::lit_str(st, _) => st.to_owned(),
|
||||||
ast::lit_char(c) => ~"'" + std::char::from_u32(c).unwrap().to_str() + "'",
|
ast::lit_char(c) => ~"'" + std::char::from_u32(c).unwrap().to_str() + "'",
|
||||||
ast::lit_int(i, _t) => i.to_str(),
|
ast::lit_int(i, _t) => i.to_str(),
|
||||||
ast::lit_uint(u, _t) => u.to_str(),
|
ast::lit_uint(u, _t) => u.to_str(),
|
||||||
|
|
|
@ -406,7 +406,7 @@ impl<'self> Visitor<()> for ViewItemVisitor<'self> {
|
||||||
// ignore metadata, I guess
|
// ignore metadata, I guess
|
||||||
ast::view_item_extern_mod(lib_ident, path_opt, _, _) => {
|
ast::view_item_extern_mod(lib_ident, path_opt, _, _) => {
|
||||||
let lib_name = match path_opt {
|
let lib_name = match path_opt {
|
||||||
Some(p) => p,
|
Some((p, _)) => p,
|
||||||
None => self.sess.str_of(lib_ident)
|
None => self.sess.str_of(lib_ident)
|
||||||
};
|
};
|
||||||
debug2!("Finding and installing... {}", lib_name);
|
debug2!("Finding and installing... {}", lib_name);
|
||||||
|
@ -513,7 +513,7 @@ pub fn find_and_install_dependencies(context: &BuildContext,
|
||||||
|
|
||||||
pub fn mk_string_lit(s: @str) -> ast::lit {
|
pub fn mk_string_lit(s: @str) -> ast::lit {
|
||||||
Spanned {
|
Spanned {
|
||||||
node: ast::lit_str(s),
|
node: ast::lit_str(s, ast::CookedStr),
|
||||||
span: dummy_sp()
|
span: dummy_sp()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -680,11 +680,17 @@ pub enum mac_ {
|
||||||
mac_invoc_tt(Path,~[token_tree],SyntaxContext), // new macro-invocation
|
mac_invoc_tt(Path,~[token_tree],SyntaxContext), // new macro-invocation
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
|
||||||
|
pub enum StrStyle {
|
||||||
|
CookedStr,
|
||||||
|
RawStr(uint)
|
||||||
|
}
|
||||||
|
|
||||||
pub type lit = Spanned<lit_>;
|
pub type lit = Spanned<lit_>;
|
||||||
|
|
||||||
#[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
|
#[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
|
||||||
pub enum lit_ {
|
pub enum lit_ {
|
||||||
lit_str(@str),
|
lit_str(@str, StrStyle),
|
||||||
lit_char(u32),
|
lit_char(u32),
|
||||||
lit_int(i64, int_ty),
|
lit_int(i64, int_ty),
|
||||||
lit_uint(u64, uint_ty),
|
lit_uint(u64, uint_ty),
|
||||||
|
@ -862,6 +868,7 @@ pub enum asm_dialect {
|
||||||
#[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
|
#[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
|
||||||
pub struct inline_asm {
|
pub struct inline_asm {
|
||||||
asm: @str,
|
asm: @str,
|
||||||
|
asm_str_style: StrStyle,
|
||||||
clobbers: @str,
|
clobbers: @str,
|
||||||
inputs: ~[(@str, @Expr)],
|
inputs: ~[(@str, @Expr)],
|
||||||
outputs: ~[(@str, @Expr)],
|
outputs: ~[(@str, @Expr)],
|
||||||
|
@ -1027,7 +1034,7 @@ pub enum view_item_ {
|
||||||
// optional @str: if present, this is a location (containing
|
// optional @str: if present, this is a location (containing
|
||||||
// arbitrary characters) from which to fetch the crate sources
|
// arbitrary characters) from which to fetch the crate sources
|
||||||
// For example, extern mod whatever = "github.com/mozilla/rust"
|
// For example, extern mod whatever = "github.com/mozilla/rust"
|
||||||
view_item_extern_mod(Ident, Option<@str>, ~[@MetaItem], NodeId),
|
view_item_extern_mod(Ident, Option<(@str, StrStyle)>, ~[@MetaItem], NodeId),
|
||||||
view_item_use(~[@view_path]),
|
view_item_use(~[@view_path]),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -67,7 +67,7 @@ impl AttrMetaMethods for MetaItem {
|
||||||
match self.node {
|
match self.node {
|
||||||
MetaNameValue(_, ref v) => {
|
MetaNameValue(_, ref v) => {
|
||||||
match v.node {
|
match v.node {
|
||||||
ast::lit_str(s) => Some(s),
|
ast::lit_str(s, _) => Some(s),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
@ -127,7 +127,7 @@ impl AttributeMethods for Attribute {
|
||||||
/* Constructors */
|
/* Constructors */
|
||||||
|
|
||||||
pub fn mk_name_value_item_str(name: @str, value: @str) -> @MetaItem {
|
pub fn mk_name_value_item_str(name: @str, value: @str) -> @MetaItem {
|
||||||
let value_lit = dummy_spanned(ast::lit_str(value));
|
let value_lit = dummy_spanned(ast::lit_str(value, ast::CookedStr));
|
||||||
mk_name_value_item(name, value_lit)
|
mk_name_value_item(name, value_lit)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -153,7 +153,7 @@ pub fn mk_attr(item: @MetaItem) -> Attribute {
|
||||||
|
|
||||||
pub fn mk_sugared_doc_attr(text: @str, lo: BytePos, hi: BytePos) -> Attribute {
|
pub fn mk_sugared_doc_attr(text: @str, lo: BytePos, hi: BytePos) -> Attribute {
|
||||||
let style = doc_comment_style(text);
|
let style = doc_comment_style(text);
|
||||||
let lit = spanned(lo, hi, ast::lit_str(text));
|
let lit = spanned(lo, hi, ast::lit_str(text, ast::CookedStr));
|
||||||
let attr = Attribute_ {
|
let attr = Attribute_ {
|
||||||
style: style,
|
style: style,
|
||||||
value: @spanned(lo, hi, MetaNameValue(@"doc", lit)),
|
value: @spanned(lo, hi, MetaNameValue(@"doc", lit)),
|
||||||
|
|
|
@ -44,6 +44,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
|
||||||
tts.to_owned());
|
tts.to_owned());
|
||||||
|
|
||||||
let mut asm = @"";
|
let mut asm = @"";
|
||||||
|
let mut asm_str_style = None;
|
||||||
let mut outputs = ~[];
|
let mut outputs = ~[];
|
||||||
let mut inputs = ~[];
|
let mut inputs = ~[];
|
||||||
let mut cons = ~"";
|
let mut cons = ~"";
|
||||||
|
@ -58,8 +59,11 @@ pub fn expand_asm(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
|
||||||
while continue_ {
|
while continue_ {
|
||||||
match state {
|
match state {
|
||||||
Asm => {
|
Asm => {
|
||||||
asm = expr_to_str(cx, p.parse_expr(),
|
let (s, style) =
|
||||||
"inline assembly must be a string literal.");
|
expr_to_str(cx, p.parse_expr(),
|
||||||
|
"inline assembly must be a string literal.");
|
||||||
|
asm = s;
|
||||||
|
asm_str_style = Some(style);
|
||||||
}
|
}
|
||||||
Outputs => {
|
Outputs => {
|
||||||
while *p.token != token::EOF &&
|
while *p.token != token::EOF &&
|
||||||
|
@ -70,7 +74,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
|
||||||
p.eat(&token::COMMA);
|
p.eat(&token::COMMA);
|
||||||
}
|
}
|
||||||
|
|
||||||
let constraint = p.parse_str();
|
let (constraint, _str_style) = p.parse_str();
|
||||||
p.expect(&token::LPAREN);
|
p.expect(&token::LPAREN);
|
||||||
let out = p.parse_expr();
|
let out = p.parse_expr();
|
||||||
p.expect(&token::RPAREN);
|
p.expect(&token::RPAREN);
|
||||||
|
@ -93,7 +97,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
|
||||||
p.eat(&token::COMMA);
|
p.eat(&token::COMMA);
|
||||||
}
|
}
|
||||||
|
|
||||||
let constraint = p.parse_str();
|
let (constraint, _str_style) = p.parse_str();
|
||||||
p.expect(&token::LPAREN);
|
p.expect(&token::LPAREN);
|
||||||
let input = p.parse_expr();
|
let input = p.parse_expr();
|
||||||
p.expect(&token::RPAREN);
|
p.expect(&token::RPAREN);
|
||||||
|
@ -111,14 +115,15 @@ pub fn expand_asm(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
|
||||||
p.eat(&token::COMMA);
|
p.eat(&token::COMMA);
|
||||||
}
|
}
|
||||||
|
|
||||||
let clob = format!("~\\{{}\\}", p.parse_str());
|
let (s, _str_style) = p.parse_str();
|
||||||
|
let clob = format!("~\\{{}\\}", s);
|
||||||
clobs.push(clob);
|
clobs.push(clob);
|
||||||
}
|
}
|
||||||
|
|
||||||
cons = clobs.connect(",");
|
cons = clobs.connect(",");
|
||||||
}
|
}
|
||||||
Options => {
|
Options => {
|
||||||
let option = p.parse_str();
|
let (option, _str_style) = p.parse_str();
|
||||||
|
|
||||||
if "volatile" == option {
|
if "volatile" == option {
|
||||||
volatile = true;
|
volatile = true;
|
||||||
|
@ -175,6 +180,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
|
||||||
id: ast::DUMMY_NODE_ID,
|
id: ast::DUMMY_NODE_ID,
|
||||||
node: ast::ExprInlineAsm(ast::inline_asm {
|
node: ast::ExprInlineAsm(ast::inline_asm {
|
||||||
asm: asm,
|
asm: asm,
|
||||||
|
asm_str_style: asm_str_style.unwrap(),
|
||||||
clobbers: cons.to_managed(),
|
clobbers: cons.to_managed(),
|
||||||
inputs: inputs,
|
inputs: inputs,
|
||||||
outputs: outputs,
|
outputs: outputs,
|
||||||
|
|
|
@ -410,10 +410,10 @@ impl ExtCtxt {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expr_to_str(cx: @ExtCtxt, expr: @ast::Expr, err_msg: &str) -> @str {
|
pub fn expr_to_str(cx: @ExtCtxt, expr: @ast::Expr, err_msg: &str) -> (@str, ast::StrStyle) {
|
||||||
match expr.node {
|
match expr.node {
|
||||||
ast::ExprLit(l) => match l.node {
|
ast::ExprLit(l) => match l.node {
|
||||||
ast::lit_str(s) => s,
|
ast::lit_str(s, style) => (s, style),
|
||||||
_ => cx.span_fatal(l.span, err_msg)
|
_ => cx.span_fatal(l.span, err_msg)
|
||||||
},
|
},
|
||||||
_ => cx.span_fatal(expr.span, err_msg)
|
_ => cx.span_fatal(expr.span, err_msg)
|
||||||
|
|
|
@ -562,7 +562,7 @@ impl AstBuilder for @ExtCtxt {
|
||||||
self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice)
|
self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice)
|
||||||
}
|
}
|
||||||
fn expr_str(&self, sp: Span, s: @str) -> @ast::Expr {
|
fn expr_str(&self, sp: Span, s: @str) -> @ast::Expr {
|
||||||
self.expr_lit(sp, ast::lit_str(s))
|
self.expr_lit(sp, ast::lit_str(s, ast::CookedStr))
|
||||||
}
|
}
|
||||||
fn expr_str_uniq(&self, sp: Span, s: @str) -> @ast::Expr {
|
fn expr_str_uniq(&self, sp: Span, s: @str) -> @ast::Expr {
|
||||||
self.expr_vstore(sp, self.expr_str(sp, s), ast::ExprVstoreUniq)
|
self.expr_vstore(sp, self.expr_str(sp, s), ast::ExprVstoreUniq)
|
||||||
|
|
|
@ -28,7 +28,7 @@ pub fn expand_syntax_ext(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree]) -> bas
|
||||||
// expression is a literal
|
// expression is a literal
|
||||||
ast::ExprLit(lit) => match lit.node {
|
ast::ExprLit(lit) => match lit.node {
|
||||||
// string literal, push each byte to vector expression
|
// string literal, push each byte to vector expression
|
||||||
ast::lit_str(s) => {
|
ast::lit_str(s, _) => {
|
||||||
for byte in s.byte_iter() {
|
for byte in s.byte_iter() {
|
||||||
bytes.push(cx.expr_u8(expr.span, byte));
|
bytes.push(cx.expr_u8(expr.span, byte));
|
||||||
}
|
}
|
||||||
|
|
|
@ -361,7 +361,7 @@ impl<'self> TraitDef<'self> {
|
||||||
span,
|
span,
|
||||||
cx.meta_name_value(span,
|
cx.meta_name_value(span,
|
||||||
@"doc",
|
@"doc",
|
||||||
ast::lit_str(@"Automatically derived.")));
|
ast::lit_str(@"Automatically derived.", ast::CookedStr)));
|
||||||
cx.item(
|
cx.item(
|
||||||
span,
|
span,
|
||||||
::parse::token::special_idents::clownshoes_extensions,
|
::parse::token::special_idents::clownshoes_extensions,
|
||||||
|
|
|
@ -41,10 +41,13 @@ pub fn expand_env(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
|
||||||
cx.span_fatal(sp, "env! takes 1 or 2 arguments");
|
cx.span_fatal(sp, "env! takes 1 or 2 arguments");
|
||||||
}
|
}
|
||||||
|
|
||||||
let var = expr_to_str(cx, exprs[0], "expected string literal");
|
let (var, _var_str_style) = expr_to_str(cx, exprs[0], "expected string literal");
|
||||||
let msg = match exprs.len() {
|
let msg = match exprs.len() {
|
||||||
1 => format!("Environment variable {} not defined", var).to_managed(),
|
1 => format!("Environment variable {} not defined", var).to_managed(),
|
||||||
2 => expr_to_str(cx, exprs[1], "expected string literal"),
|
2 => {
|
||||||
|
let (s, _style) = expr_to_str(cx, exprs[1], "expected string literal");
|
||||||
|
s
|
||||||
|
}
|
||||||
_ => cx.span_fatal(sp, "env! takes 1 or 2 arguments")
|
_ => cx.span_fatal(sp, "env! takes 1 or 2 arguments")
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -30,7 +30,7 @@ pub fn expand_syntax_ext(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
|
||||||
if args.len() == 0 {
|
if args.len() == 0 {
|
||||||
cx.span_fatal(sp, "fmt! takes at least 1 argument.");
|
cx.span_fatal(sp, "fmt! takes at least 1 argument.");
|
||||||
}
|
}
|
||||||
let fmt =
|
let (fmt, _fmt_str_style) =
|
||||||
expr_to_str(cx, args[0],
|
expr_to_str(cx, args[0],
|
||||||
"first argument to fmt! must be a string literal.");
|
"first argument to fmt! must be a string literal.");
|
||||||
let fmtspan = args[0].span;
|
let fmtspan = args[0].span;
|
||||||
|
|
|
@ -722,8 +722,8 @@ pub fn expand_args(ecx: @ExtCtxt, sp: Span,
|
||||||
(_, None) => { return MRExpr(ecx.expr_uint(sp, 2)); }
|
(_, None) => { return MRExpr(ecx.expr_uint(sp, 2)); }
|
||||||
};
|
};
|
||||||
cx.fmtsp = efmt.span;
|
cx.fmtsp = efmt.span;
|
||||||
let fmt = expr_to_str(ecx, efmt,
|
let (fmt, _fmt_str_style) = expr_to_str(ecx, efmt,
|
||||||
"format argument must be a string literal.");
|
"format argument must be a string literal.");
|
||||||
|
|
||||||
let mut err = false;
|
let mut err = false;
|
||||||
do parse::parse_error::cond.trap(|m| {
|
do parse::parse_error::cond.trap(|m| {
|
||||||
|
|
|
@ -118,7 +118,7 @@ pub mod rt {
|
||||||
|
|
||||||
impl<'self> ToSource for &'self str {
|
impl<'self> ToSource for &'self str {
|
||||||
fn to_source(&self) -> @str {
|
fn to_source(&self) -> @str {
|
||||||
let lit = dummy_spanned(ast::lit_str(self.to_managed()));
|
let lit = dummy_spanned(ast::lit_str(self.to_managed(), ast::CookedStr));
|
||||||
pprust::lit_to_str(@lit).to_managed()
|
pprust::lit_to_str(@lit).to_managed()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -48,6 +48,7 @@ use ast::{BiRem, required};
|
||||||
use ast::{ret_style, return_val, BiShl, BiShr, Stmt, StmtDecl};
|
use ast::{ret_style, return_val, BiShl, BiShr, Stmt, StmtDecl};
|
||||||
use ast::{StmtExpr, StmtSemi, StmtMac, struct_def, struct_field};
|
use ast::{StmtExpr, StmtSemi, StmtMac, struct_def, struct_field};
|
||||||
use ast::{struct_variant_kind, BiSub};
|
use ast::{struct_variant_kind, BiSub};
|
||||||
|
use ast::StrStyle;
|
||||||
use ast::{sty_box, sty_region, sty_static, sty_uniq, sty_value};
|
use ast::{sty_box, sty_region, sty_static, sty_uniq, sty_value};
|
||||||
use ast::{token_tree, trait_method, trait_ref, tt_delim, tt_seq, tt_tok};
|
use ast::{token_tree, trait_method, trait_ref, tt_delim, tt_seq, tt_tok};
|
||||||
use ast::{tt_nonterminal, tuple_variant_kind, Ty, ty_, ty_bot, ty_box};
|
use ast::{tt_nonterminal, tuple_variant_kind, Ty, ty_, ty_bot, ty_box};
|
||||||
|
@ -1282,8 +1283,8 @@ impl Parser {
|
||||||
token::LIT_FLOAT(s, ft) => lit_float(self.id_to_str(s), ft),
|
token::LIT_FLOAT(s, ft) => lit_float(self.id_to_str(s), ft),
|
||||||
token::LIT_FLOAT_UNSUFFIXED(s) =>
|
token::LIT_FLOAT_UNSUFFIXED(s) =>
|
||||||
lit_float_unsuffixed(self.id_to_str(s)),
|
lit_float_unsuffixed(self.id_to_str(s)),
|
||||||
token::LIT_STR(s) => lit_str(self.id_to_str(s)),
|
token::LIT_STR(s) => lit_str(self.id_to_str(s), ast::CookedStr),
|
||||||
token::LIT_STR_RAW(s, _) => lit_str(self.id_to_str(s)),
|
token::LIT_STR_RAW(s, n) => lit_str(self.id_to_str(s), ast::RawStr(n)),
|
||||||
token::LPAREN => { self.expect(&token::RPAREN); lit_nil },
|
token::LPAREN => { self.expect(&token::RPAREN); lit_nil },
|
||||||
_ => { self.unexpected_last(tok); }
|
_ => { self.unexpected_last(tok); }
|
||||||
}
|
}
|
||||||
|
@ -2158,7 +2159,7 @@ impl Parser {
|
||||||
// HACK: turn &[...] into a &-evec
|
// HACK: turn &[...] into a &-evec
|
||||||
ex = match e.node {
|
ex = match e.node {
|
||||||
ExprVec(*) | ExprLit(@codemap::Spanned {
|
ExprVec(*) | ExprLit(@codemap::Spanned {
|
||||||
node: lit_str(_), span: _
|
node: lit_str(*), span: _
|
||||||
})
|
})
|
||||||
if m == MutImmutable => {
|
if m == MutImmutable => {
|
||||||
ExprVstore(e, ExprVstoreSlice)
|
ExprVstore(e, ExprVstoreSlice)
|
||||||
|
@ -2182,7 +2183,7 @@ impl Parser {
|
||||||
ExprVec(*) | ExprRepeat(*) if m == MutMutable =>
|
ExprVec(*) | ExprRepeat(*) if m == MutMutable =>
|
||||||
ExprVstore(e, ExprVstoreMutBox),
|
ExprVstore(e, ExprVstoreMutBox),
|
||||||
ExprVec(*) |
|
ExprVec(*) |
|
||||||
ExprLit(@codemap::Spanned { node: lit_str(_), span: _}) |
|
ExprLit(@codemap::Spanned { node: lit_str(*), span: _}) |
|
||||||
ExprRepeat(*) if m == MutImmutable => ExprVstore(e, ExprVstoreBox),
|
ExprRepeat(*) if m == MutImmutable => ExprVstore(e, ExprVstoreBox),
|
||||||
_ => self.mk_unary(UnBox(m), e)
|
_ => self.mk_unary(UnBox(m), e)
|
||||||
};
|
};
|
||||||
|
@ -2195,7 +2196,7 @@ impl Parser {
|
||||||
// HACK: turn ~[...] into a ~-evec
|
// HACK: turn ~[...] into a ~-evec
|
||||||
ex = match e.node {
|
ex = match e.node {
|
||||||
ExprVec(*) |
|
ExprVec(*) |
|
||||||
ExprLit(@codemap::Spanned { node: lit_str(_), span: _}) |
|
ExprLit(@codemap::Spanned { node: lit_str(*), span: _}) |
|
||||||
ExprRepeat(*) => ExprVstore(e, ExprVstoreUniq),
|
ExprRepeat(*) => ExprVstore(e, ExprVstoreUniq),
|
||||||
_ => self.mk_unary(UnUniq, e)
|
_ => self.mk_unary(UnUniq, e)
|
||||||
};
|
};
|
||||||
|
@ -2707,7 +2708,7 @@ impl Parser {
|
||||||
pat = match sub.node {
|
pat = match sub.node {
|
||||||
PatLit(e@@Expr {
|
PatLit(e@@Expr {
|
||||||
node: ExprLit(@codemap::Spanned {
|
node: ExprLit(@codemap::Spanned {
|
||||||
node: lit_str(_),
|
node: lit_str(*),
|
||||||
span: _}), _
|
span: _}), _
|
||||||
}) => {
|
}) => {
|
||||||
let vst = @Expr {
|
let vst = @Expr {
|
||||||
|
@ -2735,7 +2736,7 @@ impl Parser {
|
||||||
pat = match sub.node {
|
pat = match sub.node {
|
||||||
PatLit(e@@Expr {
|
PatLit(e@@Expr {
|
||||||
node: ExprLit(@codemap::Spanned {
|
node: ExprLit(@codemap::Spanned {
|
||||||
node: lit_str(_),
|
node: lit_str(*),
|
||||||
span: _}), _
|
span: _}), _
|
||||||
}) => {
|
}) => {
|
||||||
let vst = @Expr {
|
let vst = @Expr {
|
||||||
|
@ -2764,7 +2765,7 @@ impl Parser {
|
||||||
pat = match sub.node {
|
pat = match sub.node {
|
||||||
PatLit(e@@Expr {
|
PatLit(e@@Expr {
|
||||||
node: ExprLit(@codemap::Spanned {
|
node: ExprLit(@codemap::Spanned {
|
||||||
node: lit_str(_), span: _}), _
|
node: lit_str(*), span: _}), _
|
||||||
}) => {
|
}) => {
|
||||||
let vst = @Expr {
|
let vst = @Expr {
|
||||||
id: ast::DUMMY_NODE_ID,
|
id: ast::DUMMY_NODE_ID,
|
||||||
|
@ -4373,15 +4374,15 @@ impl Parser {
|
||||||
abi::all_names().connect(", "),
|
abi::all_names().connect(", "),
|
||||||
word));
|
word));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Some(abis)
|
Some(abis)
|
||||||
}
|
}
|
||||||
|
|
||||||
_ => {
|
_ => {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// parse one of the items or view items allowed by the
|
// parse one of the items or view items allowed by the
|
||||||
|
@ -4932,18 +4933,17 @@ impl Parser {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_optional_str(&self) -> Option<@str> {
|
pub fn parse_optional_str(&self) -> Option<(@str, ast::StrStyle)> {
|
||||||
match *self.token {
|
let (s, style) = match *self.token {
|
||||||
token::LIT_STR(s)
|
token::LIT_STR(s) => (s, ast::CookedStr),
|
||||||
| token::LIT_STR_RAW(s, _) => {
|
token::LIT_STR_RAW(s, n) => (s, ast::RawStr(n)),
|
||||||
self.bump();
|
_ => return None
|
||||||
Some(ident_to_str(&s))
|
};
|
||||||
}
|
self.bump();
|
||||||
_ => None
|
Some((ident_to_str(&s), style))
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_str(&self) -> @str {
|
pub fn parse_str(&self) -> (@str, StrStyle) {
|
||||||
match self.parse_optional_str() {
|
match self.parse_optional_str() {
|
||||||
Some(s) => { s }
|
Some(s) => { s }
|
||||||
_ => self.fatal("expected string literal")
|
_ => self.fatal("expected string literal")
|
||||||
|
|
|
@ -1433,10 +1433,10 @@ pub fn print_expr(s: @ps, expr: &ast::Expr) {
|
||||||
word(s.s, "asm!");
|
word(s.s, "asm!");
|
||||||
}
|
}
|
||||||
popen(s);
|
popen(s);
|
||||||
print_string(s, a.asm);
|
print_string(s, a.asm, a.asm_str_style);
|
||||||
word_space(s, ":");
|
word_space(s, ":");
|
||||||
for &(co, o) in a.outputs.iter() {
|
for &(co, o) in a.outputs.iter() {
|
||||||
print_string(s, co);
|
print_string(s, co, ast::CookedStr);
|
||||||
popen(s);
|
popen(s);
|
||||||
print_expr(s, o);
|
print_expr(s, o);
|
||||||
pclose(s);
|
pclose(s);
|
||||||
|
@ -1444,14 +1444,14 @@ pub fn print_expr(s: @ps, expr: &ast::Expr) {
|
||||||
}
|
}
|
||||||
word_space(s, ":");
|
word_space(s, ":");
|
||||||
for &(co, o) in a.inputs.iter() {
|
for &(co, o) in a.inputs.iter() {
|
||||||
print_string(s, co);
|
print_string(s, co, ast::CookedStr);
|
||||||
popen(s);
|
popen(s);
|
||||||
print_expr(s, o);
|
print_expr(s, o);
|
||||||
pclose(s);
|
pclose(s);
|
||||||
word_space(s, ",");
|
word_space(s, ",");
|
||||||
}
|
}
|
||||||
word_space(s, ":");
|
word_space(s, ":");
|
||||||
print_string(s, a.clobbers);
|
print_string(s, a.clobbers, ast::CookedStr);
|
||||||
pclose(s);
|
pclose(s);
|
||||||
}
|
}
|
||||||
ast::ExprMac(ref m) => print_mac(s, m),
|
ast::ExprMac(ref m) => print_mac(s, m),
|
||||||
|
@ -1894,11 +1894,11 @@ pub fn print_view_item(s: @ps, item: &ast::view_item) {
|
||||||
ast::view_item_extern_mod(id, ref optional_path, ref mta, _) => {
|
ast::view_item_extern_mod(id, ref optional_path, ref mta, _) => {
|
||||||
head(s, "extern mod");
|
head(s, "extern mod");
|
||||||
print_ident(s, id);
|
print_ident(s, id);
|
||||||
for p in optional_path.iter() {
|
for &(ref p, style) in optional_path.iter() {
|
||||||
space(s.s);
|
space(s.s);
|
||||||
word(s.s, "=");
|
word(s.s, "=");
|
||||||
space(s.s);
|
space(s.s);
|
||||||
print_string(s, *p);
|
print_string(s, *p, style);
|
||||||
}
|
}
|
||||||
if !mta.is_empty() {
|
if !mta.is_empty() {
|
||||||
popen(s);
|
popen(s);
|
||||||
|
@ -2060,7 +2060,7 @@ pub fn print_literal(s: @ps, lit: &ast::lit) {
|
||||||
_ => ()
|
_ => ()
|
||||||
}
|
}
|
||||||
match lit.node {
|
match lit.node {
|
||||||
ast::lit_str(st) => print_string(s, st),
|
ast::lit_str(st, style) => print_string(s, st, style),
|
||||||
ast::lit_char(ch) => {
|
ast::lit_char(ch) => {
|
||||||
let mut res = ~"'";
|
let mut res = ~"'";
|
||||||
do char::from_u32(ch).unwrap().escape_default |c| {
|
do char::from_u32(ch).unwrap().escape_default |c| {
|
||||||
|
@ -2180,10 +2180,13 @@ pub fn print_comment(s: @ps, cmnt: &comments::cmnt) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_string(s: @ps, st: &str) {
|
pub fn print_string(s: @ps, st: &str, style: ast::StrStyle) {
|
||||||
word(s.s, "\"");
|
let st = match style {
|
||||||
word(s.s, st.escape_default());
|
ast::CookedStr => format!("\"{}\"", st.escape_default()),
|
||||||
word(s.s, "\"");
|
ast::RawStr(n) => format!("r{delim}\"{string}\"{delim}",
|
||||||
|
delim="#".repeat(n), string=st)
|
||||||
|
};
|
||||||
|
word(s.s, st);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn to_str<T>(t: &T, f: &fn(@ps, &T), intr: @ident_interner) -> ~str {
|
pub fn to_str<T>(t: &T, f: &fn(@ps, &T), intr: @ident_interner) -> ~str {
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue