Implement the new parsing rules for types in the parser, modifying the AST appropriately.
commit 74a1041a4d
parent 8fb027e398

7 changed files with 263 additions and 235 deletions
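The grammar change at the heart of this commit: object-sum types such as `Foo + Send` are now parsed by a dedicated `parse_ty_sum` rule and represented by a new `TyObjectSum` AST node, while `TyPath` loses its optional bounds field (see the `ast.rs` and parser hunks below). The following is a minimal standalone sketch of that AST reshaping, using simplified, made-up type names rather than the real libsyntax definitions:

// Minimal standalone sketch of the AST reshaping in this commit
// (simplified, hypothetical types; not the actual libsyntax code).

#[derive(Debug)]
enum Ty {
    // Before this commit, the path variant carried optional `+ Bound`
    // bounds itself: TyPath(Path, Option<TyParamBounds>, NodeId).
    // After it, the path variant holds only the path...
    Path(String),
    // ...and `Foo + Send` becomes its own node: a left-hand type plus bounds.
    ObjectSum(Box<Ty>, Vec<String>),
}

fn main() {
    // `Foo` alone is still a plain path type.
    let plain = Ty::Path("Foo".to_string());
    // `Foo + Send` is now an object sum wrapping the path.
    let sum = Ty::ObjectSum(Box::new(Ty::Path("Foo".to_string())),
                            vec!["Send".to_string()]);
    println!("{:?} / {:?}", plain, sum);
}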
@@ -145,5 +145,7 @@ register_diagnostics!(
 E0166,
 E0167,
 E0168,
-E0169
+E0169,
+E0170,
+E0171
 )
@@ -1396,29 +1396,53 @@ impl<'a> Resolver<'a> {
 // methods within to a new module, if the type was defined
 // within this module.

-// Create the module and add all methods.
-match ty.node {
-TyPath(ref path, _, _) if path.segments.len() == 1 => {
+let mod_name = match ty.node {
+TyPath(ref path, _) if path.segments.len() == 1 => {
 // FIXME(18446) we should distinguish between the name of
 // a trait and the name of an impl of that trait.
-let mod_name = path.segments.last().unwrap().identifier.name;
+Some(path.segments.last().unwrap().identifier.name)
+}
+TyObjectSum(ref lhs_ty, _) => {
+match lhs_ty.node {
+TyPath(ref path, _) if path.segments.len() == 1 => {
+Some(path.segments.last().unwrap().identifier.name)
+}
+_ => {
+None
+}
+}
+}
+_ => {
+None
+}
+};

+match mod_name {
+None => {
+self.resolve_error(ty.span,
+"inherent implementations may \
+only be implemented in the same \
+module as the type they are \
+implemented for")
+}
+Some(mod_name) => {
+// Create the module and add all methods.
 let parent_opt = parent.module().children.borrow()
 .get(&mod_name).cloned();
 let new_parent = match parent_opt {
 // It already exists
 Some(ref child) if child.get_module_if_available()
 .is_some() &&
 (child.get_module().kind.get() == ImplModuleKind ||
 child.get_module().kind.get() == TraitModuleKind) => {
 ModuleReducedGraphParent(child.get_module())
 }
 Some(ref child) if child.get_module_if_available()
 .is_some() &&
 child.get_module().kind.get() ==
 EnumModuleKind => {
 ModuleReducedGraphParent(child.get_module())
 }
 // Create the module
 _ => {
 let name_bindings =
@@ -1433,7 +1457,7 @@ impl<'a> Resolver<'a> {
 let ns = TypeNS;
 let is_public =
 !name_bindings.defined_in_namespace(ns) ||
 name_bindings.defined_in_public_namespace(ns);

 name_bindings.define_module(parent_link,
 Some(def_id),
@@ -1459,21 +1483,21 @@ impl<'a> Resolver<'a> {
 ForbidDuplicateValues,
 method.span);
 let def = match method.pe_explicit_self()
 .node {
 SelfStatic => {
 // Static methods become
 // `DefStaticMethod`s.
 DefStaticMethod(local_def(method.id),
 FromImpl(local_def(item.id)))
 }
 _ => {
 // Non-static methods become
 // `DefMethod`s.
 DefMethod(local_def(method.id),
 None,
 FromImpl(local_def(item.id)))
 }
 };

 // NB: not IMPORTABLE
 let modifiers = if method.pe_vis() == ast::Public {
@@ -1496,7 +1520,7 @@ impl<'a> Resolver<'a> {
 ForbidDuplicateTypesAndModules,
 typedef.span);
 let def = DefAssociatedTy(local_def(
 typedef.id));
 // NB: not IMPORTABLE
 let modifiers = if typedef.vis == ast::Public {
 PUBLIC
@@ -1511,13 +1535,6 @@ impl<'a> Resolver<'a> {
 }
 }
 }
-_ => {
-self.resolve_error(ty.span,
-"inherent implementations may \
-only be implemented in the same \
-module as the type they are \
-implemented for")
-}
 }

 parent
@@ -59,8 +59,9 @@ use middle::typeck::rscope::{UnelidableRscope, RegionScope, SpecificRscope,
 ShiftedRscope, BindingRscope};
 use middle::typeck::rscope;
 use middle::typeck::TypeAndSubsts;
+use util::common::ErrorReported;
 use util::nodemap::DefIdMap;
-use util::ppaux::{Repr, UserString};
+use util::ppaux::{mod, Repr, UserString};

 use std::rc::Rc;
 use std::iter::AdditiveIterator;
@@ -585,7 +586,7 @@ fn check_path_args(tcx: &ty::ctxt,
 pub fn ast_ty_to_prim_ty<'tcx>(tcx: &ty::ctxt<'tcx>, ast_ty: &ast::Ty)
 -> Option<Ty<'tcx>> {
 match ast_ty.node {
-ast::TyPath(ref path, _, id) => {
+ast::TyPath(ref path, id) => {
 let a_def = match tcx.def_map.borrow().get(&id) {
 None => {
 tcx.sess.span_bug(ast_ty.span,
@@ -642,7 +643,7 @@ pub fn ast_ty_to_builtin_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
 }

 match ast_ty.node {
-ast::TyPath(ref path, _, id) => {
+ast::TyPath(ref path, id) => {
 let a_def = match this.tcx().def_map.borrow().get(&id) {
 None => {
 this.tcx()
@@ -682,64 +683,92 @@ pub fn ast_ty_to_builtin_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
 }
 }

-// Handle `~`, `Box`, and `&` being able to mean strs and vecs.
-// If a_seq_ty is a str or a vec, make it a str/vec.
-// Also handle first-class trait types.
-fn mk_pointer<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
-this: &AC,
-rscope: &RS,
-a_seq_mutbl: ast::Mutability,
-a_seq_ty: &ast::Ty,
-region: ty::Region,
-constr: |Ty<'tcx>| -> Ty<'tcx>)
--> Ty<'tcx>
+fn ast_ty_to_trait_ref<'tcx,AC,RS>(this: &AC,
+rscope: &RS,
+ty: &ast::Ty,
+bounds: &[ast::TyParamBound])
+-> Result<ty::TraitRef<'tcx>, ErrorReported>
+where AC : AstConv<'tcx>, RS : RegionScope
 {
-let tcx = this.tcx();
+/*!
+* In a type like `Foo + Send`, we want to wait to collect the
+* full set of bounds before we make the object type, because we
+* need them to infer a region bound. (For example, if we tried
+* made a type from just `Foo`, then it wouldn't be enough to
+* infer a 'static bound, and hence the user would get an error.)
+* So this function is used when we're dealing with a sum type to
+* convert the LHS. It only accepts a type that refers to a trait
+* name, and reports an error otherwise.
+*/

-debug!("mk_pointer(region={}, a_seq_ty={})",
-region,
-a_seq_ty.repr(tcx));
-
-match a_seq_ty.node {
-ast::TyVec(ref ty) => {
-let ty = ast_ty_to_ty(this, rscope, &**ty);
-return constr(ty::mk_vec(tcx, ty, None));
-}
-ast::TyPath(ref path, ref opt_bounds, id) => {
-// Note that the "bounds must be empty if path is not a trait"
-// restriction is enforced in the below case for ty_path, which
-// will run after this as long as the path isn't a trait.
-match tcx.def_map.borrow().get(&id) {
-Some(&def::DefPrimTy(ast::TyStr)) => {
-check_path_args(tcx, path, NO_TPS | NO_REGIONS);
-return ty::mk_str_slice(tcx, region, a_seq_mutbl);
-}
+match ty.node {
+ast::TyPath(ref path, id) => {
+match this.tcx().def_map.borrow().get(&id) {
 Some(&def::DefTrait(trait_def_id)) => {
-let result = ast_path_to_trait_ref(this,
+return Ok(ast_path_to_trait_ref(this,
 rscope,
 trait_def_id,
 None,
-path);
-let empty_vec = [];
-let bounds = match *opt_bounds { None => empty_vec.as_slice(),
-Some(ref bounds) => bounds.as_slice() };
-let existential_bounds = conv_existential_bounds(this,
-rscope,
-path.span,
-&[Rc::new(result.clone())],
-bounds);
-let tr = ty::mk_trait(tcx,
-result,
-existential_bounds);
-return ty::mk_rptr(tcx, region, ty::mt{mutbl: a_seq_mutbl, ty: tr});
+path));
+}
+_ => {
+span_err!(this.tcx().sess, ty.span, E0170, "expected a reference to a trait");
+Err(ErrorReported)
 }
-_ => {}
 }
 }
-_ => {}
+_ => {
+span_err!(this.tcx().sess, ty.span, E0171,
+"expected a path on the left-hand side of `+`, not `{}`",
+pprust::ty_to_string(ty));
+match ty.node {
+ast::TyRptr(None, ref mut_ty) => {
+span_note!(this.tcx().sess, ty.span,
+"perhaps you meant `&{}({} +{})`? (per RFC 248)",
+ppaux::mutability_to_string(mut_ty.mutbl),
+pprust::ty_to_string(&*mut_ty.ty),
+pprust::bounds_to_string(bounds));
+}
+
+ast::TyRptr(Some(ref lt), ref mut_ty) => {
+span_note!(this.tcx().sess, ty.span,
+"perhaps you meant `&{} {}({} +{})`? (per RFC 248)",
+pprust::lifetime_to_string(lt),
+ppaux::mutability_to_string(mut_ty.mutbl),
+pprust::ty_to_string(&*mut_ty.ty),
+pprust::bounds_to_string(bounds));
+}
+
+_ => {
+span_note!(this.tcx().sess, ty.span,
+"perhaps you forget parentheses? (per RFC 248)");
+}
+}
+Err(ErrorReported)
+}
 }

-constr(ast_ty_to_ty(this, rscope, a_seq_ty))
+}
+
+fn trait_ref_to_object_type<'tcx,AC,RS>(this: &AC,
+rscope: &RS,
+span: Span,
+trait_ref: ty::TraitRef<'tcx>,
+bounds: &[ast::TyParamBound])
+-> Ty<'tcx>
+where AC : AstConv<'tcx>, RS : RegionScope
+{
+let existential_bounds = conv_existential_bounds(this,
+rscope,
+span,
+&[Rc::new(trait_ref.clone())],
+bounds);
+
+let result = ty::mk_trait(this.tcx(), trait_ref, existential_bounds);
+debug!("trait_ref_to_object_type: result={}",
+result.repr(this.tcx()));
+
+result
 }

 fn qpath_to_ty<'tcx,AC,RS>(this: &AC,
@@ -806,6 +835,17 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
 ast::TyVec(ref ty) => {
 ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None)
 }
+ast::TyObjectSum(ref ty, ref bounds) => {
+match ast_ty_to_trait_ref(this, rscope, &**ty, bounds.as_slice()) {
+Ok(trait_ref) => {
+trait_ref_to_object_type(this, rscope, ast_ty.span,
+trait_ref, bounds.as_slice())
+}
+Err(ErrorReported) => {
+ty::mk_err()
+}
+}
+}
 ast::TyPtr(ref mt) => {
 ty::mk_ptr(tcx, ty::mt {
 ty: ast_ty_to_ty(this, rscope, &*mt.ty),
@@ -815,8 +855,8 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
 ast::TyRptr(ref region, ref mt) => {
 let r = opt_ast_region_to_region(this, rscope, ast_ty.span, region);
 debug!("ty_rptr r={}", r.repr(this.tcx()));
-mk_pointer(this, rscope, mt.mutbl, &*mt.ty, r,
-|ty| ty::mk_rptr(tcx, r, ty::mt {ty: ty, mutbl: mt.mutbl}))
+let t = ast_ty_to_ty(this, rscope, &*mt.ty);
+ty::mk_rptr(tcx, r, ty::mt {ty: t, mutbl: mt.mutbl})
 }
 ast::TyTup(ref fields) => {
 let flds = fields.iter()
@@ -874,7 +914,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
 ast::TyPolyTraitRef(ref bounds) => {
 conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds.as_slice())
 }
-ast::TyPath(ref path, ref bounds, id) => {
+ast::TyPath(ref path, id) => {
 let a_def = match tcx.def_map.borrow().get(&id) {
 None => {
 tcx.sess
@@ -884,35 +924,16 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
 }
 Some(&d) => d
 };
-// Kind bounds on path types are only supported for traits.
-match a_def {
-// But don't emit the error if the user meant to do a trait anyway.
-def::DefTrait(..) => { },
-_ if bounds.is_some() =>
-tcx.sess.span_err(ast_ty.span,
-"kind bounds can only be used on trait types"),
-_ => { },
-}
 match a_def {
 def::DefTrait(trait_def_id) => {
+// N.B. this case overlaps somewhat with
+// TyObjectSum, see that fn for details
 let result = ast_path_to_trait_ref(this,
 rscope,
 trait_def_id,
 None,
 path);
-let empty_bounds: &[ast::TyParamBound] = &[];
-let ast_bounds = match *bounds {
-Some(ref b) => b.as_slice(),
-None => empty_bounds
-};
-let bounds = conv_existential_bounds(this,
-rscope,
-ast_ty.span,
-&[Rc::new(result.clone())],
-ast_bounds);
-let result_ty = ty::mk_trait(tcx, result, bounds);
-debug!("ast_ty_to_ty: result_ty={}", result_ty.repr(this.tcx()));
-result_ty
+trait_ref_to_object_type(this, rscope, path.span, result, &[])
 }
 def::DefTy(did, _) | def::DefStruct(did) => {
 ast_path_to_ty(this, rscope, did, path).ty
@@ -1151,7 +1151,9 @@ pub enum Ty_ {
 /// A path (`module::module::...::Type`) or primitive
 ///
 /// Type parameters are stored in the Path itself
-TyPath(Path, Option<TyParamBounds>, NodeId), // for #7264; see above
+TyPath(Path, NodeId),
+/// Something like `A+B`. Note that `B` must always be a path.
+TyObjectSum(P<Ty>, TyParamBounds),
 /// A type like `for<'a> Foo<&'a Bar>`
 TyPolyTraitRef(TyParamBounds),
 /// A "qualified path", e.g. `<Vec<T> as SomeTrait>::SomeType`
@@ -1029,7 +1029,7 @@ mod test {
 parameters: ast::PathParameters::none(),
 }
 ),
-}, None, ast::DUMMY_NODE_ID),
+}, ast::DUMMY_NODE_ID),
 span:sp(10,13)
 }),
 pat: P(ast::Pat {
@@ -111,16 +111,6 @@ pub enum PathParsingMode {
 /// A path with a lifetime and type parameters with double colons before
 /// the type parameters; e.g. `foo::bar::<'a>::Baz::<T>`
 LifetimeAndTypesWithColons,
-/// A path with a lifetime and type parameters with bounds before the last
-/// set of type parameters only; e.g. `foo::bar<'a>::Baz+X+Y<T>` This
-/// form does not use extra double colons.
-LifetimeAndTypesAndBounds,
-}
-
-/// A path paired with optional type bounds.
-pub struct PathAndBounds {
-pub path: ast::Path,
-pub bounds: Option<ast::TyParamBounds>,
 }

 enum ItemOrViewItem {
@@ -1053,17 +1043,9 @@ impl<'a> Parser<'a> {
 }
 }

-pub fn parse_ty_path(&mut self, plus_allowed: bool) -> Ty_ {
-let mode = if plus_allowed {
-LifetimeAndTypesAndBounds
-} else {
-LifetimeAndTypesWithoutColons
-};
-let PathAndBounds {
-path,
-bounds
-} = self.parse_path(mode);
-TyPath(path, bounds, ast::DUMMY_NODE_ID)
+pub fn parse_ty_path(&mut self) -> Ty_ {
+let path = self.parse_path(LifetimeAndTypesWithoutColons);
+TyPath(path, ast::DUMMY_NODE_ID)
 }

 /// parse a TyBareFn type:
@@ -1286,7 +1268,7 @@ impl<'a> Parser<'a> {
 let lo = self.span.lo;
 let ident = self.parse_ident();
 self.expect(&token::Eq);
-let typ = self.parse_ty(true);
+let typ = self.parse_ty_sum();
 let hi = self.span.hi;
 self.expect(&token::Semi);
 Typedef {
@@ -1385,7 +1367,7 @@ impl<'a> Parser<'a> {
 /// Parse a possibly mutable type
 pub fn parse_mt(&mut self) -> MutTy {
 let mutbl = self.parse_mutability();
-let t = self.parse_ty(true);
+let t = self.parse_ty();
 MutTy { ty: t, mutbl: mutbl }
 }

@@ -1396,7 +1378,7 @@ impl<'a> Parser<'a> {
 let mutbl = self.parse_mutability();
 let id = self.parse_ident();
 self.expect(&token::Colon);
-let ty = self.parse_ty(true);
+let ty = self.parse_ty_sum();
 let hi = ty.span.hi;
 ast::TypeField {
 ident: id,
@@ -1411,7 +1393,19 @@ impl<'a> Parser<'a> {
 if self.eat(&token::Not) {
 NoReturn(self.span)
 } else {
-Return(self.parse_ty(true))
+let t = self.parse_ty();
+
+// We used to allow `fn foo() -> &T + U`, but don't
+// anymore. If we see it, report a useful error. This
+// only makes sense because `parse_ret_ty` is only
+// used in fn *declarations*, not fn types or where
+// clauses (i.e., not when parsing something like
+// `FnMut() -> T + Send`, where the `+` is legal).
+if self.token == token::BinOp(token::Plus) {
+self.warn("deprecated syntax: `()` are required, see RFC 248 for details");
+}
+
+Return(t)
 }
 } else {
 let pos = self.span.lo;
@@ -1423,11 +1417,36 @@ impl<'a> Parser<'a> {
 }
 }

+/// Parse a type in a context where `T1+T2` is allowed.
+pub fn parse_ty_sum(&mut self) -> P<Ty> {
+let lo = self.span.lo;
+let lhs = self.parse_ty();
+
+if !self.eat(&token::BinOp(token::Plus)) {
+return lhs;
+}
+
+let bounds = self.parse_ty_param_bounds();
+
+// In type grammar, `+` is treated like a binary operator,
+// and hence both L and R side are required.
+if bounds.len() == 0 {
+let last_span = self.last_span;
+self.span_err(last_span,
+"at least one type parameter bound \
+must be specified");
+}
+
+let sp = mk_sp(lo, self.last_span.hi);
+let sum = ast::TyObjectSum(lhs, bounds);
+P(Ty {id: ast::DUMMY_NODE_ID, node: sum, span: sp})
+}
+
 /// Parse a type.
 ///
 /// The second parameter specifies whether the `+` binary operator is
 /// allowed in the type grammar.
-pub fn parse_ty(&mut self, plus_allowed: bool) -> P<Ty> {
+pub fn parse_ty(&mut self) -> P<Ty> {
 maybe_whole!(no_clone self, NtTy);

 let lo = self.span.lo;
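To make the new `parse_ty` / `parse_ty_sum` split concrete: the parser first parses a single type, and only a caller that allows sums then looks for a `+` followed by bounds, treating `+` like a binary operator whose right-hand side must not be empty. The toy parser below illustrates that control flow only; it is a hypothetical sketch over string tokens, not the real token-stream code from this commit.

// Toy sketch of the parse_ty / parse_ty_sum split (hypothetical token
// handling; the real parser works on a token stream, not string slices).

#[derive(Debug)]
enum Ty {
    Path(String),
    ObjectSum(Box<Ty>, Vec<String>),
}

fn parse_ty(tokens: &[&str], pos: &mut usize) -> Ty {
    // A "type" in this sketch is just one path segment; assumes a token exists.
    let t = Ty::Path(tokens[*pos].to_string());
    *pos += 1;
    t
}

fn parse_ty_sum(tokens: &[&str], pos: &mut usize) -> Ty {
    // Parse the left-hand type first.
    let lhs = parse_ty(tokens, pos);
    // No `+`? Then it is not a sum, and we return the plain type.
    if tokens.get(*pos) != Some(&"+") {
        return lhs;
    }
    *pos += 1;
    // `+` behaves like a binary operator: the bounds on the right-hand side
    // must be present (the real code reports an error on an empty list).
    let mut bounds = Vec::new();
    while let Some(tok) = tokens.get(*pos) {
        bounds.push(tok.to_string());
        *pos += 1;
        if tokens.get(*pos) == Some(&"+") { *pos += 1; } else { break; }
    }
    Ty::ObjectSum(Box::new(lhs), bounds)
}

fn main() {
    let mut pos = 0;
    println!("{:?}", parse_ty_sum(&["Foo", "+", "Send", "+", "Sync"], &mut pos));
}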
@@ -1441,7 +1460,7 @@ impl<'a> Parser<'a> {
 let mut ts = vec![];
 let mut last_comma = false;
 while self.token != token::CloseDelim(token::Paren) {
-ts.push(self.parse_ty(true));
+ts.push(self.parse_ty_sum());
 if self.token == token::Comma {
 last_comma = true;
 self.bump();
@@ -1465,7 +1484,7 @@ impl<'a> Parser<'a> {
 token::OpenDelim(token::Bracket) => self.obsolete(last_span, ObsoleteOwnedVector),
 _ => self.obsolete(last_span, ObsoleteOwnedType)
 }
-TyTup(vec![self.parse_ty(false)])
+TyTup(vec![self.parse_ty()])
 } else if self.token == token::BinOp(token::Star) {
 // STAR POINTER (bare pointer?)
 self.bump();
@@ -1473,7 +1492,7 @@ impl<'a> Parser<'a> {
 } else if self.token == token::OpenDelim(token::Bracket) {
 // VECTOR
 self.expect(&token::OpenDelim(token::Bracket));
-let t = self.parse_ty(true);
+let t = self.parse_ty_sum();

 // Parse the `, ..e` in `[ int, ..e ]`
 // where `e` is a const expression
@@ -1514,7 +1533,7 @@ impl<'a> Parser<'a> {
 } else if self.token == token::Lt {
 // QUALIFIED PATH `<TYPE as TRAIT_REF>::item`
 self.bump();
-let self_type = self.parse_ty(true);
+let self_type = self.parse_ty_sum();
 self.expect_keyword(keywords::As);
 let trait_ref = self.parse_trait_ref();
 self.expect(&token::Gt);
@@ -1529,7 +1548,7 @@ impl<'a> Parser<'a> {
 self.token.is_ident() ||
 self.token.is_path() {
 // NAMED TYPE
-self.parse_ty_path(plus_allowed)
+self.parse_ty_path()
 } else if self.eat(&token::Underscore) {
 // TYPE TO BE INFERRED
 TyInfer
@@ -1563,7 +1582,7 @@ impl<'a> Parser<'a> {
 known as `*const T`");
 MutImmutable
 };
-let t = self.parse_ty(true);
+let t = self.parse_ty();
 MutTy { ty: t, mutbl: mutbl }
 }

@@ -1603,7 +1622,7 @@ impl<'a> Parser<'a> {
 special_idents::invalid)
 };

-let t = self.parse_ty(true);
+let t = self.parse_ty_sum();

 Arg {
 ty: t,
@@ -1621,7 +1640,7 @@ impl<'a> Parser<'a> {
 pub fn parse_fn_block_arg(&mut self) -> Arg {
 let pat = self.parse_pat();
 let t = if self.eat(&token::Colon) {
-self.parse_ty(true)
+self.parse_ty_sum()
 } else {
 P(Ty {
 id: ast::DUMMY_NODE_ID,
@@ -1739,7 +1758,7 @@ impl<'a> Parser<'a> {
 /// mode. The `mode` parameter determines whether lifetimes, types, and/or
 /// bounds are permitted and whether `::` must precede type parameter
 /// groups.
-pub fn parse_path(&mut self, mode: PathParsingMode) -> PathAndBounds {
+pub fn parse_path(&mut self, mode: PathParsingMode) -> ast::Path {
 // Check for a whole path...
 let found = match self.token {
 token::Interpolated(token::NtPath(_)) => Some(self.bump_and_get()),
@@ -1747,10 +1766,7 @@ impl<'a> Parser<'a> {
 };
 match found {
 Some(token::Interpolated(token::NtPath(box path))) => {
-return PathAndBounds {
-path: path,
-bounds: None
-}
+return path;
 }
 _ => {}
 }
@@ -1762,8 +1778,7 @@ impl<'a> Parser<'a> {
 // identifier followed by an optional lifetime and a set of types.
 // A bound set is a set of type parameter bounds.
 let segments = match mode {
-LifetimeAndTypesWithoutColons |
-LifetimeAndTypesAndBounds => {
+LifetimeAndTypesWithoutColons => {
 self.parse_path_segments_without_colons()
 }
 LifetimeAndTypesWithColons => {
@@ -1774,44 +1789,14 @@ impl<'a> Parser<'a> {
 }
 };

-// Next, parse a plus and bounded type parameters, if
-// applicable. We need to remember whether the separate was
-// present for later, because in some contexts it's a parse
-// error.
-let opt_bounds = {
-if mode == LifetimeAndTypesAndBounds &&
-self.eat(&token::BinOp(token::Plus))
-{
-let bounds = self.parse_ty_param_bounds();
-
-// For some reason that I do not fully understand, we
-// do not permit an empty list in the case where it is
-// introduced by a `+`, but we do for `:` and other
-// separators. -nmatsakis
-if bounds.len() == 0 {
-let last_span = self.last_span;
-self.span_err(last_span,
-"at least one type parameter bound \
-must be specified");
-}
-
-Some(bounds)
-} else {
-None
-}
-};
-
 // Assemble the span.
 let span = mk_sp(lo, self.last_span.hi);

 // Assemble the result.
-PathAndBounds {
-path: ast::Path {
-span: span,
-global: is_global,
-segments: segments,
-},
-bounds: opt_bounds,
+ast::Path {
+span: span,
+global: is_global,
+segments: segments,
 }
 }

@@ -1837,10 +1822,10 @@ impl<'a> Parser<'a> {
 let inputs = self.parse_seq_to_end(
 &token::CloseDelim(token::Paren),
 seq_sep_trailing_allowed(token::Comma),
-|p| p.parse_ty(true));
+|p| p.parse_ty_sum());

 let output_ty = if self.eat(&token::RArrow) {
-Some(self.parse_ty(true))
+Some(self.parse_ty())
 } else {
 None
 };
@@ -2327,7 +2312,7 @@ impl<'a> Parser<'a> {
 !self.token.is_keyword(keywords::True) &&
 !self.token.is_keyword(keywords::False) {
 let pth =
-self.parse_path(LifetimeAndTypesWithColons).path;
+self.parse_path(LifetimeAndTypesWithColons);

 // `!`, as an operator, is prefix, so we know this isn't that
 if self.token == token::Not {
@@ -2898,7 +2883,7 @@ impl<'a> Parser<'a> {
 }
 None => {
 if as_prec > min_prec && self.eat_keyword(keywords::As) {
-let rhs = self.parse_ty(false);
+let rhs = self.parse_ty();
 let _as = self.mk_expr(lhs.span.lo,
 rhs.span.hi,
 ExprCast(lhs, rhs));
@@ -3362,8 +3347,7 @@ impl<'a> Parser<'a> {
 }) {
 self.bump();
 let end = if self.token.is_ident() || self.token.is_path() {
-let path = self.parse_path(LifetimeAndTypesWithColons)
-.path;
+let path = self.parse_path(LifetimeAndTypesWithColons);
 let hi = self.span.hi;
 self.mk_expr(lo, hi, ExprPath(path))
 } else {
@@ -3433,8 +3417,7 @@ impl<'a> Parser<'a> {
 }
 } else {
 // parse an enum pat
-let enum_path = self.parse_path(LifetimeAndTypesWithColons)
-.path;
+let enum_path = self.parse_path(LifetimeAndTypesWithColons);
 match self.token {
 token::OpenDelim(token::Brace) => {
 self.bump();
@@ -3548,7 +3531,7 @@ impl<'a> Parser<'a> {
 span: mk_sp(lo, lo),
 });
 if self.eat(&token::Colon) {
-ty = self.parse_ty(true);
+ty = self.parse_ty_sum();
 }
 let init = self.parse_initializer();
 P(ast::Local {
@@ -3577,7 +3560,7 @@ impl<'a> Parser<'a> {
 }
 let name = self.parse_ident();
 self.expect(&token::Colon);
-let ty = self.parse_ty(true);
+let ty = self.parse_ty_sum();
 spanned(lo, self.last_span.hi, ast::StructField_ {
 kind: NamedField(name, pr),
 id: ast::DUMMY_NODE_ID,
@@ -3624,7 +3607,7 @@ impl<'a> Parser<'a> {

 // Potential trouble: if we allow macros with paths instead of
 // idents, we'd need to look ahead past the whole path here...
-let pth = self.parse_path(NoTypesAllowed).path;
+let pth = self.parse_path(NoTypesAllowed);
 self.bump();

 let id = match self.token {
@@ -3976,7 +3959,7 @@ impl<'a> Parser<'a> {

 let default = if self.token == token::Eq {
 self.bump();
-Some(self.parse_ty(true))
+Some(self.parse_ty_sum())
 }
 else { None };

@@ -4032,7 +4015,7 @@ impl<'a> Parser<'a> {
 Some(token::Comma),
 |p| {
 p.forbid_lifetime();
-p.parse_ty(true)
+p.parse_ty_sum()
 }
 );
 (lifetimes, result.into_vec())
@@ -4265,7 +4248,7 @@ impl<'a> Parser<'a> {
 // Determine whether this is the fully explicit form, `self:
 // TYPE`.
 if self.eat(&token::Colon) {
-SelfExplicit(self.parse_ty(false), self_ident)
+SelfExplicit(self.parse_ty_sum(), self_ident)
 } else {
 SelfValue(self_ident)
 }
@@ -4277,7 +4260,7 @@ impl<'a> Parser<'a> {
 // Determine whether this is the fully explicit form,
 // `self: TYPE`.
 if self.eat(&token::Colon) {
-SelfExplicit(self.parse_ty(false), self_ident)
+SelfExplicit(self.parse_ty_sum(), self_ident)
 } else {
 SelfValue(self_ident)
 }
@@ -4466,7 +4449,7 @@ impl<'a> Parser<'a> {
 && (self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren))
 || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) {
 // method macro.
-let pth = self.parse_path(NoTypesAllowed).path;
+let pth = self.parse_path(NoTypesAllowed);
 self.expect(&token::Not);

 // eat a matched-delimiter token tree:
@@ -4564,30 +4547,25 @@ impl<'a> Parser<'a> {
 let could_be_trait = self.token != token::OpenDelim(token::Paren);

 // Parse the trait.
-let mut ty = self.parse_ty(true);
+let mut ty = self.parse_ty_sum();

 // Parse traits, if necessary.
 let opt_trait = if could_be_trait && self.eat_keyword(keywords::For) {
 // New-style trait. Reinterpret the type as a trait.
 let opt_trait_ref = match ty.node {
-TyPath(ref path, None, node_id) => {
+TyPath(ref path, node_id) => {
 Some(TraitRef {
 path: (*path).clone(),
 ref_id: node_id,
 })
 }
-TyPath(_, Some(_), _) => {
-self.span_err(ty.span,
-"bounded traits are only valid in type position");
-None
-}
 _ => {
 self.span_err(ty.span, "not a trait");
 None
 }
 };

-ty = self.parse_ty(true);
+ty = self.parse_ty_sum();
 opt_trait_ref
 } else {
 None
@@ -4606,7 +4584,7 @@ impl<'a> Parser<'a> {
 /// Parse a::B<String,int>
 fn parse_trait_ref(&mut self) -> TraitRef {
 ast::TraitRef {
-path: self.parse_path(LifetimeAndTypesWithoutColons).path,
+path: self.parse_path(LifetimeAndTypesWithoutColons),
 ref_id: ast::DUMMY_NODE_ID,
 }
 }
@@ -4638,7 +4616,7 @@ impl<'a> Parser<'a> {
 let mut generics = self.parse_generics();

 if self.eat(&token::Colon) {
-let ty = self.parse_ty(true);
+let ty = self.parse_ty_sum();
 self.span_err(ty.span, "`virtual` structs have been removed from the language");
 }

@@ -4673,7 +4651,7 @@ impl<'a> Parser<'a> {
 let struct_field_ = ast::StructField_ {
 kind: UnnamedField(p.parse_visibility()),
 id: ast::DUMMY_NODE_ID,
-ty: p.parse_ty(true),
+ty: p.parse_ty_sum(),
 attrs: attrs,
 };
 spanned(lo, p.span.hi, struct_field_)
@@ -4830,7 +4808,7 @@ impl<'a> Parser<'a> {
 fn parse_item_const(&mut self, m: Option<Mutability>) -> ItemInfo {
 let id = self.parse_ident();
 self.expect(&token::Colon);
-let ty = self.parse_ty(true);
+let ty = self.parse_ty_sum();
 self.expect(&token::Eq);
 let e = self.parse_expr();
 self.commit_expr_expecting(&*e, token::Semi);
@@ -5023,7 +5001,7 @@ impl<'a> Parser<'a> {

 let ident = self.parse_ident();
 self.expect(&token::Colon);
-let ty = self.parse_ty(true);
+let ty = self.parse_ty_sum();
 let hi = self.span.hi;
 self.expect(&token::Semi);
 P(ForeignItem {
@@ -5181,7 +5159,7 @@ impl<'a> Parser<'a> {
 let mut tps = self.parse_generics();
 self.parse_where_clause(&mut tps);
 self.expect(&token::Eq);
-let ty = self.parse_ty(true);
+let ty = self.parse_ty_sum();
 self.expect(&token::Semi);
 (ident, ItemTy(ty, tps), None)
 }
@@ -5235,7 +5213,7 @@ impl<'a> Parser<'a> {
 &token::OpenDelim(token::Paren),
 &token::CloseDelim(token::Paren),
 seq_sep_trailing_allowed(token::Comma),
-|p| p.parse_ty(true)
+|p| p.parse_ty_sum()
 );
 for ty in arg_tys.into_iter() {
 args.push(ast::VariantArg {
@@ -5593,7 +5571,7 @@ impl<'a> Parser<'a> {
 // MACRO INVOCATION ITEM

 // item macro.
-let pth = self.parse_path(NoTypesAllowed).path;
+let pth = self.parse_path(NoTypesAllowed);
 self.expect(&token::Not);

 // a 'special' identifier (like what `macro_rules!` uses)
@@ -293,6 +293,10 @@ pub fn ty_to_string(ty: &ast::Ty) -> String {
 $to_string(|s| s.print_type(ty))
 }

+pub fn bounds_to_string(bounds: &[ast::TyParamBound]) -> String {
+$to_string(|s| s.print_bounds("", bounds))
+}
+
 pub fn pat_to_string(pat: &ast::Pat) -> String {
 $to_string(|s| s.print_pat(pat))
 }
@@ -739,11 +743,15 @@ impl<'a> State<'a> {
 Some(&generics),
 None));
 }
-ast::TyPath(ref path, ref bounds, _) => {
-try!(self.print_bounded_path(path, bounds));
+ast::TyPath(ref path, _) => {
+try!(self.print_path(path, false));
+}
+ast::TyObjectSum(ref ty, ref bounds) => {
+try!(self.print_type(&**ty));
+try!(self.print_bounds("+", bounds.as_slice()));
 }
 ast::TyPolyTraitRef(ref bounds) => {
-try!(self.print_bounds("", bounds));
+try!(self.print_bounds("", bounds.as_slice()));
 }
 ast::TyQPath(ref qpath) => {
 try!(word(&mut self.s, "<"));
@@ -970,7 +978,7 @@ impl<'a> State<'a> {
 }
 _ => {}
 }
-try!(self.print_bounds(":", bounds));
+try!(self.print_bounds(":", bounds.as_slice()));
 try!(self.print_where_clause(generics));
 try!(word(&mut self.s, " "));
 try!(self.bopen());
@@ -2329,7 +2337,7 @@ impl<'a> State<'a> {

 pub fn print_bounds(&mut self,
 prefix: &str,
-bounds: &OwnedSlice<ast::TyParamBound>)
+bounds: &[ast::TyParamBound])
 -> IoResult<()> {
 if !bounds.is_empty() {
 try!(word(&mut self.s, prefix));
@@ -2418,7 +2426,7 @@ impl<'a> State<'a> {
 _ => {}
 }
 try!(self.print_ident(param.ident));
-try!(self.print_bounds(":", &param.bounds));
+try!(self.print_bounds(":", param.bounds.as_slice()));
 match param.default {
 Some(ref default) => {
 try!(space(&mut self.s));
@@ -2447,7 +2455,7 @@ impl<'a> State<'a> {
 }

 try!(self.print_ident(predicate.ident));
-try!(self.print_bounds(":", &predicate.bounds));
+try!(self.print_bounds(":", predicate.bounds.as_slice()));
 }

 Ok(())
@@ -2664,7 +2672,7 @@ impl<'a> State<'a> {
 try!(self.pclose());
 }

-try!(self.print_bounds(":", bounds));
+try!(self.print_bounds(":", bounds.as_slice()));

 try!(self.print_fn_output(decl));
