
Auto merge of #109547 - matthiaskrgr:rollup-zczqgdk, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - #108629 (rustdoc: add support for type filters in arguments and generics)
 - #108924 (panic_immediate_abort requires abort as a panic strategy)
 - #108961 (Refine error spans for const args in hir typeck)
 - #108986 (sync LVI tests)
 - #109142 (Add block-based mutex unlocking example)
 - #109368 (fix typo in the creation of OpenOption for RustyHermit)
 - #109493 (Return nested obligations from canonical response var unification)
 - #109515 (Add AixLinker to support linking on AIX)
 - #109536 (resolve: Rename some cstore methods to match queries and add comments)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2023-03-24 02:29:48 +00:00
commit 4c0f5008ce
48 changed files with 717 additions and 289 deletions

View file

@@ -133,6 +133,9 @@ pub fn get_linker<'a>(
         LinkerFlavor::Unix(Cc::No) if sess.target.os == "l4re" => {
             Box::new(L4Bender::new(cmd, sess)) as Box<dyn Linker>
         }
+        LinkerFlavor::Unix(Cc::No) if sess.target.os == "aix" => {
+            Box::new(AixLinker::new(cmd, sess)) as Box<dyn Linker>
+        }
         LinkerFlavor::WasmLld(Cc::No) => Box::new(WasmLd::new(cmd, sess)) as Box<dyn Linker>,
         LinkerFlavor::Gnu(cc, _)
         | LinkerFlavor::Darwin(cc, _)
@@ -1474,6 +1477,177 @@ impl<'a> L4Bender<'a> {
     }
 }
+/// Linker for AIX.
+pub struct AixLinker<'a> {
+    cmd: Command,
+    sess: &'a Session,
+    hinted_static: bool,
+}
+impl<'a> AixLinker<'a> {
+    pub fn new(cmd: Command, sess: &'a Session) -> AixLinker<'a> {
+        AixLinker { cmd: cmd, sess: sess, hinted_static: false }
+    }
+    fn hint_static(&mut self) {
+        if !self.hinted_static {
+            self.cmd.arg("-bstatic");
+            self.hinted_static = true;
+        }
+    }
+    fn hint_dynamic(&mut self) {
+        if self.hinted_static {
+            self.cmd.arg("-bdynamic");
+            self.hinted_static = false;
+        }
+    }
+    fn build_dylib(&mut self, _out_filename: &Path) {
+        self.cmd.arg("-bM:SRE");
+        self.cmd.arg("-bnoentry");
+        // FIXME: Use CreateExportList utility to create export list
+        // and remove -bexpfull.
+        self.cmd.arg("-bexpfull");
+    }
+}
+impl<'a> Linker for AixLinker<'a> {
+    fn link_dylib(&mut self, lib: &str, _verbatim: bool, _as_needed: bool) {
+        self.hint_dynamic();
+        self.cmd.arg(format!("-l{}", lib));
+    }
+    fn link_staticlib(&mut self, lib: &str, _verbatim: bool) {
+        self.hint_static();
+        self.cmd.arg(format!("-l{}", lib));
+    }
+    fn link_rlib(&mut self, lib: &Path) {
+        self.hint_static();
+        self.cmd.arg(lib);
+    }
+    fn include_path(&mut self, path: &Path) {
+        self.cmd.arg("-L").arg(path);
+    }
+    fn framework_path(&mut self, _: &Path) {
+        bug!("frameworks are not supported on AIX");
+    }
+    fn output_filename(&mut self, path: &Path) {
+        self.cmd.arg("-o").arg(path);
+    }
+    fn add_object(&mut self, path: &Path) {
+        self.cmd.arg(path);
+    }
+    fn full_relro(&mut self) {}
+    fn partial_relro(&mut self) {}
+    fn no_relro(&mut self) {}
+    fn cmd(&mut self) -> &mut Command {
+        &mut self.cmd
+    }
+    fn set_output_kind(&mut self, output_kind: LinkOutputKind, out_filename: &Path) {
+        match output_kind {
+            LinkOutputKind::DynamicDylib => {
+                self.hint_dynamic();
+                self.build_dylib(out_filename);
+            }
+            LinkOutputKind::StaticDylib => {
+                self.hint_static();
+                self.build_dylib(out_filename);
+            }
+            _ => {}
+        }
+    }
+    fn link_rust_dylib(&mut self, lib: &str, _: &Path) {
+        self.hint_dynamic();
+        self.cmd.arg(format!("-l{}", lib));
+    }
+    fn link_framework(&mut self, _framework: &str, _as_needed: bool) {
+        bug!("frameworks not supported on AIX");
+    }
+    fn link_whole_staticlib(&mut self, lib: &str, verbatim: bool, search_path: &[PathBuf]) {
+        self.hint_static();
+        let lib = find_native_static_library(lib, verbatim, search_path, &self.sess);
+        self.cmd.arg(format!("-bkeepfile:{}", lib.to_str().unwrap()));
+    }
+    fn link_whole_rlib(&mut self, lib: &Path) {
+        self.hint_static();
+        self.cmd.arg(format!("-bkeepfile:{}", lib.to_str().unwrap()));
+    }
+    fn gc_sections(&mut self, _keep_metadata: bool) {
+        self.cmd.arg("-bgc");
+    }
+    fn no_gc_sections(&mut self) {
+        self.cmd.arg("-bnogc");
+    }
+    fn optimize(&mut self) {}
+    fn pgo_gen(&mut self) {}
+    fn control_flow_guard(&mut self) {}
+    fn debuginfo(&mut self, strip: Strip, _: &[PathBuf]) {
+        match strip {
+            Strip::None => {}
+            // FIXME: -s strips the symbol table, line number information
+            // and relocation information.
+            Strip::Debuginfo | Strip::Symbols => {
+                self.cmd.arg("-s");
+            }
+        }
+    }
+    fn no_crt_objects(&mut self) {}
+    fn no_default_libraries(&mut self) {}
+    fn export_symbols(&mut self, tmpdir: &Path, _crate_type: CrateType, symbols: &[String]) {
+        let path = tmpdir.join("list.exp");
+        let res: io::Result<()> = try {
+            let mut f = BufWriter::new(File::create(&path)?);
+            // TODO: use llvm-nm to generate export list.
+            for symbol in symbols {
+                debug!(" _{}", symbol);
+                writeln!(f, " {}", symbol)?;
+            }
+        };
+        if let Err(e) = res {
+            self.sess.fatal(&format!("failed to write export file: {}", e));
+        }
+        self.cmd.arg(format!("-bE:{}", path.to_str().unwrap()));
+    }
+    fn subsystem(&mut self, _subsystem: &str) {}
+    fn reset_per_library_state(&mut self) {
+        self.hint_dynamic();
+    }
+    fn linker_plugin_lto(&mut self) {}
+    fn add_eh_frame_header(&mut self) {}
+    fn add_no_exec(&mut self) {}
+    fn add_as_needed(&mut self) {}
+}
 fn for_each_exported_symbols_include_dep<'tcx>(
     tcx: TyCtxt<'tcx>,
     crate_type: CrateType,

View file

@@ -4,7 +4,7 @@ use rustc_hir::def::Res;
 use rustc_hir::def_id::DefId;
 use rustc_infer::traits::ObligationCauseCode;
 use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor};
-use rustc_span::{self, Span};
+use rustc_span::{self, symbol::kw, Span};
 use rustc_trait_selection::traits;
 use std::ops::ControlFlow;
@@ -25,17 +25,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         let generics = self.tcx.generics_of(def_id);
         let predicate_substs = match unsubstituted_pred.kind().skip_binder() {
-            ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => pred.trait_ref.substs,
-            ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => pred.projection_ty.substs,
-            _ => ty::List::empty(),
+            ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => pred.trait_ref.substs.to_vec(),
+            ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => {
+                pred.projection_ty.substs.to_vec()
+            }
+            ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(arg, ty)) => {
+                vec![ty.into(), arg.into()]
+            }
+            ty::PredicateKind::ConstEvaluatable(e) => vec![e.into()],
+            _ => return false,
         };
-        let find_param_matching = |matches: &dyn Fn(&ty::ParamTy) -> bool| {
-            predicate_substs.types().find_map(|ty| {
-                ty.walk().find_map(|arg| {
+        let find_param_matching = |matches: &dyn Fn(ty::ParamTerm) -> bool| {
+            predicate_substs.iter().find_map(|arg| {
+                arg.walk().find_map(|arg| {
                     if let ty::GenericArgKind::Type(ty) = arg.unpack()
-                        && let ty::Param(param_ty) = ty.kind()
-                        && matches(param_ty)
+                        && let ty::Param(param_ty) = *ty.kind()
+                        && matches(ty::ParamTerm::Ty(param_ty))
+                    {
+                        Some(arg)
+                    } else if let ty::GenericArgKind::Const(ct) = arg.unpack()
+                        && let ty::ConstKind::Param(param_ct) = ct.kind()
+                        && matches(ty::ParamTerm::Const(param_ct))
                     {
                         Some(arg)
                     } else {
@@ -47,21 +58,22 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         // Prefer generics that are local to the fn item, since these are likely
         // to be the cause of the unsatisfied predicate.
-        let mut param_to_point_at = find_param_matching(&|param_ty| {
-            self.tcx.parent(generics.type_param(param_ty, self.tcx).def_id) == def_id
+        let mut param_to_point_at = find_param_matching(&|param_term| {
+            self.tcx.parent(generics.param_at(param_term.index(), self.tcx).def_id) == def_id
         });
         // Fall back to generic that isn't local to the fn item. This will come
         // from a trait or impl, for example.
-        let mut fallback_param_to_point_at = find_param_matching(&|param_ty| {
-            self.tcx.parent(generics.type_param(param_ty, self.tcx).def_id) != def_id
-                && param_ty.name != rustc_span::symbol::kw::SelfUpper
+        let mut fallback_param_to_point_at = find_param_matching(&|param_term| {
+            self.tcx.parent(generics.param_at(param_term.index(), self.tcx).def_id) != def_id
+                && !matches!(param_term, ty::ParamTerm::Ty(ty) if ty.name == kw::SelfUpper)
         });
         // Finally, the `Self` parameter is possibly the reason that the predicate
         // is unsatisfied. This is less likely to be true for methods, because
         // method probe means that we already kinda check that the predicates due
         // to the `Self` type are true.
-        let mut self_param_to_point_at =
-            find_param_matching(&|param_ty| param_ty.name == rustc_span::symbol::kw::SelfUpper);
+        let mut self_param_to_point_at = find_param_matching(
+            &|param_term| matches!(param_term, ty::ParamTerm::Ty(ty) if ty.name == kw::SelfUpper),
+        );
         // Finally, for ambiguity-related errors, we actually want to look
         // for a parameter that is the source of the inference type left
@@ -225,14 +237,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             .own_substs(ty::InternalSubsts::identity_for_item(self.tcx, def_id));
         let Some((index, _)) = own_substs
             .iter()
-            .filter(|arg| matches!(arg.unpack(), ty::GenericArgKind::Type(_)))
             .enumerate()
             .find(|(_, arg)| **arg == param_to_point_at) else { return false };
         let Some(arg) = segment
             .args()
             .args
             .iter()
-            .filter(|arg| matches!(arg, hir::GenericArg::Type(_)))
             .nth(index) else { return false; };
         error.obligation.cause.span = arg
             .span()

View file

@@ -1041,13 +1041,6 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
         self.root.tables.optimized_mir.get(self, id).is_some()
     }
-    fn module_expansion(self, id: DefIndex, sess: &Session) -> ExpnId {
-        match self.def_kind(id) {
-            DefKind::Mod | DefKind::Enum | DefKind::Trait => self.get_expn_that_defined(id, sess),
-            _ => panic!("Expected module, found {:?}", self.local_def_id(id)),
-        }
-    }
     fn get_fn_has_self_parameter(self, id: DefIndex, sess: &'a Session) -> bool {
         self.root
             .tables

View file

@@ -490,6 +490,9 @@ pub(in crate::rmeta) fn provide(providers: &mut Providers) {
                 .alloc_slice(&CStore::from_tcx(tcx).crate_dependencies_in_postorder(LOCAL_CRATE))
         },
         crates: |tcx, ()| {
+            // The list of loaded crates is now frozen in query cache,
+            // so make sure cstore is not mutably accessed from here on.
+            tcx.untracked().cstore.leak();
             tcx.arena.alloc_from_iter(CStore::from_tcx(tcx).iter_crate_data().map(|(cnum, _)| cnum))
         },
         ..*providers
@@ -537,16 +540,16 @@ impl CStore {
         )
     }
-    pub fn get_span_untracked(&self, def_id: DefId, sess: &Session) -> Span {
+    pub fn def_span_untracked(&self, def_id: DefId, sess: &Session) -> Span {
         self.get_crate_data(def_id.krate).get_span(def_id.index, sess)
     }
-    pub fn def_kind(&self, def: DefId) -> DefKind {
+    pub fn def_kind_untracked(&self, def: DefId) -> DefKind {
         self.get_crate_data(def.krate).def_kind(def.index)
     }
-    pub fn module_expansion_untracked(&self, def_id: DefId, sess: &Session) -> ExpnId {
-        self.get_crate_data(def_id.krate).module_expansion(def_id.index, sess)
+    pub fn expn_that_defined_untracked(&self, def_id: DefId, sess: &Session) -> ExpnId {
+        self.get_crate_data(def_id.krate).get_expn_that_defined(def_id.index, sess)
     }
     /// Only public-facing way to traverse all the definitions in a non-local crate.

View file

@@ -1051,6 +1051,21 @@ impl<'tcx> TermKind<'tcx> {
     }
 }
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub enum ParamTerm {
+    Ty(ParamTy),
+    Const(ParamConst),
+}
+impl ParamTerm {
+    pub fn index(self) -> usize {
+        match self {
+            ParamTerm::Ty(ty) => ty.index as usize,
+            ParamTerm::Const(ct) => ct.index as usize,
+        }
+    }
+}
 /// This kind of predicate has no *direct* correspondent in the
 /// syntax, but it roughly corresponds to the syntactic forms:
 ///

View file

@@ -114,13 +114,16 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
         }
         if !def_id.is_local() {
-            let def_kind = self.cstore().def_kind(def_id);
+            // Query `def_kind` is not used because query system overhead is too expensive here.
+            let def_kind = self.cstore().def_kind_untracked(def_id);
             if let DefKind::Mod | DefKind::Enum | DefKind::Trait = def_kind {
                 let parent = self
                     .tcx
                     .opt_parent(def_id)
                     .map(|parent_id| self.get_nearest_non_block_module(parent_id));
-                let expn_id = self.cstore().module_expansion_untracked(def_id, &self.tcx.sess);
+                // Query `expn_that_defined` is not used because
+                // hashing spans in its result is expensive.
+                let expn_id = self.cstore().expn_that_defined_untracked(def_id, &self.tcx.sess);
                 return Some(self.new_module(
                     parent,
                     ModuleKind::Def(def_kind, def_id, self.tcx.item_name(def_id)),
@@ -194,6 +197,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
     }
     pub(crate) fn build_reduced_graph_external(&mut self, module: Module<'a>) {
+        // Query `module_children` is not used because hashing spans in its result is expensive.
         let children =
             Vec::from_iter(self.cstore().module_children_untracked(module.def_id(), self.tcx.sess));
         for child in children {

View file

@@ -1875,7 +1875,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
     fn def_span(&self, def_id: DefId) -> Span {
         match def_id.as_local() {
             Some(def_id) => self.tcx.source_span(def_id),
-            None => self.cstore().get_span_untracked(def_id, self.tcx.sess),
+            // Query `def_span` is not used because hashing its result span is expensive.
+            None => self.cstore().def_span_untracked(def_id, self.tcx.sess),
         }
     }

View file

@@ -99,20 +99,20 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
         param_env: ty::ParamEnv<'tcx>,
         original_values: Vec<ty::GenericArg<'tcx>>,
         response: CanonicalResponse<'tcx>,
-    ) -> Result<Certainty, NoSolution> {
+    ) -> Result<(Certainty, Vec<Goal<'tcx, ty::Predicate<'tcx>>>), NoSolution> {
         let substitution = self.compute_query_response_substitution(&original_values, &response);
         let Response { var_values, external_constraints, certainty } =
             response.substitute(self.tcx(), &substitution);
-        self.unify_query_var_values(param_env, &original_values, var_values)?;
+        let nested_goals = self.unify_query_var_values(param_env, &original_values, var_values)?;
         // FIXME: implement external constraints.
         let ExternalConstraintsData { region_constraints, opaque_types: _ } =
             external_constraints.deref();
         self.register_region_constraints(region_constraints);
-        Ok(certainty)
+        Ok((certainty, nested_goals))
     }
     /// This returns the substitutions to instantiate the bound variables of
@@ -205,21 +205,15 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
         param_env: ty::ParamEnv<'tcx>,
         original_values: &[ty::GenericArg<'tcx>],
         var_values: CanonicalVarValues<'tcx>,
-    ) -> Result<(), NoSolution> {
+    ) -> Result<Vec<Goal<'tcx, ty::Predicate<'tcx>>>, NoSolution> {
         assert_eq!(original_values.len(), var_values.len());
+        let mut nested_goals = vec![];
         for (&orig, response) in iter::zip(original_values, var_values.var_values) {
-            // This can fail due to the occurs check, see
-            // `tests/ui/typeck/lazy-norm/equating-projection-cyclically.rs` for an example
-            // where that can happen.
-            //
-            // FIXME: To deal with #105787 I also expect us to emit nested obligations here at
-            // some point. We can figure out how to deal with this once we actually have
-            // an ICE.
-            let nested_goals = self.eq_and_get_goals(param_env, orig, response)?;
-            assert!(nested_goals.is_empty(), "{nested_goals:?}");
+            nested_goals.extend(self.eq_and_get_goals(param_env, orig, response)?);
         }
-        Ok(())
+        Ok(nested_goals)
     }
     fn register_region_constraints(&mut self, region_constraints: &QueryRegionConstraints<'tcx>) {

View file

@@ -70,7 +70,7 @@ pub trait InferCtxtEvalExt<'tcx> {
     fn evaluate_root_goal(
         &self,
         goal: Goal<'tcx, ty::Predicate<'tcx>>,
-    ) -> Result<(bool, Certainty), NoSolution>;
+    ) -> Result<(bool, Certainty, Vec<Goal<'tcx, ty::Predicate<'tcx>>>), NoSolution>;
 }
 impl<'tcx> InferCtxtEvalExt<'tcx> for InferCtxt<'tcx> {
@@ -78,9 +78,8 @@ impl<'tcx> InferCtxtEvalExt<'tcx> for InferCtxt<'tcx> {
     fn evaluate_root_goal(
         &self,
         goal: Goal<'tcx, ty::Predicate<'tcx>>,
-    ) -> Result<(bool, Certainty), NoSolution> {
+    ) -> Result<(bool, Certainty, Vec<Goal<'tcx, ty::Predicate<'tcx>>>), NoSolution> {
         let mode = if self.intercrate { SolverMode::Coherence } else { SolverMode::Normal };
         let mut search_graph = search_graph::SearchGraph::new(self.tcx, mode);
         let mut ecx = EvalCtxt {
@@ -152,13 +151,13 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
         &mut self,
         is_normalizes_to_hack: IsNormalizesToHack,
         goal: Goal<'tcx, ty::Predicate<'tcx>>,
-    ) -> Result<(bool, Certainty), NoSolution> {
+    ) -> Result<(bool, Certainty, Vec<Goal<'tcx, ty::Predicate<'tcx>>>), NoSolution> {
         let (orig_values, canonical_goal) = self.canonicalize_goal(goal);
         let canonical_response =
             EvalCtxt::evaluate_canonical_goal(self.tcx(), self.search_graph, canonical_goal)?;
         let has_changed = !canonical_response.value.var_values.is_identity();
-        let certainty = self.instantiate_and_apply_query_response(
+        let (certainty, nested_goals) = self.instantiate_and_apply_query_response(
             goal.param_env,
             orig_values,
             canonical_response,
@@ -186,7 +185,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
             assert_eq!(certainty, canonical_response.value.certainty);
         }
-        Ok((has_changed, certainty))
+        Ok((has_changed, certainty, nested_goals))
     }
     fn compute_goal(&mut self, goal: Goal<'tcx, ty::Predicate<'tcx>>) -> QueryResult<'tcx> {
@@ -263,13 +262,14 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
             let mut has_changed = Err(Certainty::Yes);
             if let Some(goal) = goals.normalizes_to_hack_goal.take() {
-                let (_, certainty) = match this.evaluate_goal(
+                let (_, certainty, nested_goals) = match this.evaluate_goal(
                     IsNormalizesToHack::Yes,
                     goal.with(this.tcx(), ty::Binder::dummy(goal.predicate)),
                 ) {
                     Ok(r) => r,
                     Err(NoSolution) => return Some(Err(NoSolution)),
                 };
+                new_goals.goals.extend(nested_goals);
                 if goal.predicate.projection_ty
                     != this.resolve_vars_if_possible(goal.predicate.projection_ty)
@@ -308,11 +308,12 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
             }
             for nested_goal in goals.goals.drain(..) {
-                let (changed, certainty) =
+                let (changed, certainty, nested_goals) =
                     match this.evaluate_goal(IsNormalizesToHack::No, nested_goal) {
                         Ok(result) => result,
                         Err(NoSolution) => return Some(Err(NoSolution)),
                     };
+                new_goals.goals.extend(nested_goals);
                 if changed {
                     has_changed = Ok(());

View file

@@ -1,6 +1,7 @@
 use std::mem;
 use rustc_infer::infer::InferCtxt;
+use rustc_infer::traits::Obligation;
 use rustc_infer::traits::{
     query::NoSolution, FulfillmentError, FulfillmentErrorCode, MismatchedProjectionTypes,
     PredicateObligation, SelectionError, TraitEngine,
@@ -61,7 +62,7 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentCtxt<'tcx> {
             let mut has_changed = false;
             for obligation in mem::take(&mut self.obligations) {
                 let goal = obligation.clone().into();
-                let (changed, certainty) = match infcx.evaluate_root_goal(goal) {
+                let (changed, certainty, nested_goals) = match infcx.evaluate_root_goal(goal) {
                     Ok(result) => result,
                     Err(NoSolution) => {
                         errors.push(FulfillmentError {
@@ -125,7 +126,16 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentCtxt<'tcx> {
                         continue;
                     }
                 };
+                // Push any nested goals that we get from unifying our canonical response
+                // with our obligation onto the fulfillment context.
+                self.obligations.extend(nested_goals.into_iter().map(|goal| {
+                    Obligation::new(
+                        infcx.tcx,
+                        obligation.cause.clone(),
+                        goal.param_env,
+                        goal.predicate,
+                    )
+                }));
                 has_changed |= changed;
                 match certainty {
                     Certainty::Yes => {}

View file

@@ -1282,10 +1282,20 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
             ),
             ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(ct, ty)) => {
-                self.tcx.sess.struct_span_err(
+                let mut diag = self.tcx.sess.struct_span_err(
                     span,
                     &format!("the constant `{}` is not of type `{}`", ct, ty),
-                )
+                );
+                self.note_type_err(
+                    &mut diag,
+                    &obligation.cause,
+                    None,
+                    None,
+                    TypeError::Sorts(ty::error::ExpectedFound::new(true, ty, ct.ty())),
+                    false,
+                    false,
+                );
+                diag
             }
         }
     }

View file

@@ -1,9 +1,8 @@
-use rustc_middle::traits::solve::{Certainty, Goal, MaybeCause};
+use rustc_infer::traits::{TraitEngine, TraitEngineExt};
 use rustc_middle::ty;
 use crate::infer::canonical::OriginalQueryValues;
 use crate::infer::InferCtxt;
-use crate::solve::InferCtxtEvalExt;
 use crate::traits::{EvaluationResult, OverflowError, PredicateObligation, SelectionContext};
 pub trait InferCtxtExt<'tcx> {
@@ -81,27 +80,20 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
         if self.tcx.trait_solver_next() {
             self.probe(|snapshot| {
-                if let Ok((_, certainty)) =
-                    self.evaluate_root_goal(Goal::new(self.tcx, param_env, obligation.predicate))
-                {
-                    match certainty {
-                        Certainty::Yes => {
-                            if self.opaque_types_added_in_snapshot(snapshot) {
-                                Ok(EvaluationResult::EvaluatedToOkModuloOpaqueTypes)
-                            } else if self.region_constraints_added_in_snapshot(snapshot).is_some()
-                            {
-                                Ok(EvaluationResult::EvaluatedToOkModuloRegions)
-                            } else {
-                                Ok(EvaluationResult::EvaluatedToOk)
-                            }
-                        }
-                        Certainty::Maybe(MaybeCause::Ambiguity) => {
-                            Ok(EvaluationResult::EvaluatedToAmbig)
-                        }
-                        Certainty::Maybe(MaybeCause::Overflow) => Err(OverflowError::Canonical),
-                    }
-                } else {
+                let mut fulfill_cx = crate::solve::FulfillmentCtxt::new();
+                fulfill_cx.register_predicate_obligation(self, obligation.clone());
+                // True errors
+                // FIXME(-Ztrait-solver=next): Overflows are reported as ambig here, is that OK?
+                if !fulfill_cx.select_where_possible(self).is_empty() {
                     Ok(EvaluationResult::EvaluatedToErr)
+                } else if !fulfill_cx.select_all_or_error(self).is_empty() {
+                    Ok(EvaluationResult::EvaluatedToAmbig)
+                } else if self.opaque_types_added_in_snapshot(snapshot) {
+                    Ok(EvaluationResult::EvaluatedToOkModuloOpaqueTypes)
+                } else if self.region_constraints_added_in_snapshot(snapshot).is_some() {
+                    Ok(EvaluationResult::EvaluatedToOkModuloRegions)
+                } else {
+                    Ok(EvaluationResult::EvaluatedToOk)
                 }
             })
         } else {

View file

@@ -618,6 +618,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             let mut fulfill_cx = crate::solve::FulfillmentCtxt::new();
             fulfill_cx.register_predicate_obligations(self.infcx, predicates);
             // True errors
+            // FIXME(-Ztrait-solver=next): Overflows are reported as ambig here, is that OK?
             if !fulfill_cx.select_where_possible(self.infcx).is_empty() {
                 return Ok(EvaluatedToErr);
             }

View file

@@ -29,6 +29,9 @@
 use crate::fmt;
 use crate::panic::{Location, PanicInfo};
+#[cfg(feature = "panic_immediate_abort")]
+const _: () = assert!(cfg!(panic = "abort"), "panic_immediate_abort requires -C panic=abort");
 // First we define the two main entry points that all panics go through.
 // In the end both are just convenience wrappers around `panic_impl`.
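The added lines rely on a `const _` item to turn a configuration mismatch into a build failure. Below is a minimal hedged sketch of the same compile-time assertion pattern in ordinary user code; the condition and message are illustrative, not the ones from the patch.

```rust
// Illustrative only: a `const _` item is evaluated at compile time, so a
// violated `assert!` aborts compilation instead of surfacing at runtime.
const _: () = assert!(usize::BITS >= 32, "this example assumes at least 32-bit pointers");

fn main() {
    println!("compile-time assertion held");
}
```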

View file

@@ -107,8 +107,8 @@ use crate::sys::locks as sys;
 /// *guard += 1;
 /// ```
 ///
-/// It is sometimes necessary to manually drop the mutex guard to unlock it
-/// sooner than the end of the enclosing scope.
+/// To unlock a mutex guard sooner than the end of the enclosing scope,
+/// either create an inner scope or drop the guard manually.
 ///
 /// ```
 /// use std::sync::{Arc, Mutex};
@@ -125,11 +125,18 @@ use crate::sys::locks as sys;
 /// let res_mutex_clone = Arc::clone(&res_mutex);
 ///
 ///     threads.push(thread::spawn(move || {
-///         let mut data = data_mutex_clone.lock().unwrap();
-///         // This is the result of some important and long-ish work.
-///         let result = data.iter().fold(0, |acc, x| acc + x * 2);
-///         data.push(result);
-///         drop(data);
+///         // Here we use a block to limit the lifetime of the lock guard.
+///         let result = {
+///             let mut data = data_mutex_clone.lock().unwrap();
+///             // This is the result of some important and long-ish work.
+///             let result = data.iter().fold(0, |acc, x| acc + x * 2);
+///             data.push(result);
+///             result
+///             // The mutex guard gets dropped here, together with any other values
+///             // created in the critical section.
+///         };
+///         // The guard created here is a temporary dropped at the end of the statement, i.e.
+///         // the lock would not remain being held even if the thread did some additional work.
 ///         *res_mutex_clone.lock().unwrap() += result;
 ///     }));
 /// });
@@ -146,6 +153,8 @@ use crate::sys::locks as sys;
 /// // It's even more important here than in the threads because we `.join` the
 /// // threads after that. If we had not dropped the mutex guard, a thread could
 /// // be waiting forever for it, causing a deadlock.
+/// // As in the threads, a block could have been used instead of calling the
+/// // `drop` function.
 /// drop(data);
 /// // Here the mutex guard is not assigned to a variable and so, even if the
 /// // scope does not end after this line, the mutex is still released: there is
@@ -160,6 +169,7 @@ use crate::sys::locks as sys;
 ///
 /// assert_eq!(*res_mutex.lock().unwrap(), 800);
 /// ```
+///
 #[stable(feature = "rust1", since = "1.0.0")]
 #[cfg_attr(not(test), rustc_diagnostic_item = "Mutex")]
 pub struct Mutex<T: ?Sized> {

View file

@@ -202,7 +202,7 @@ impl OpenOptions {
             create: false,
             create_new: false,
             // system-specific
-            mode: 0x777,
+            mode: 0o777,
         }
     }
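For context (not part of the diff): the one-character fix matters because a hexadecimal `0x777` is not the octal permission mask `0o777`. A quick standalone check:

```rust
// Illustrative only: compare the two literals the typo confused.
fn main() {
    assert_eq!(0x777, 1911); // hex: what the old code accidentally requested
    assert_eq!(0o777, 511);  // octal: rwxrwxrwx, the intended default mode
    println!("0x777 = {}, 0o777 = {}", 0x777, 0o777);
}
```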

View file

@@ -80,13 +80,20 @@ functions, and "In Return Types" shows matches in the return types of functions.
 Both are very useful when looking for a function whose name you can't quite
 bring to mind when you know the type you have or want.
-When typing in the search bar, you can prefix your search term with a type
-followed by a colon (such as `mod:`) to restrict the results to just that
-kind of item. (The available items are listed in the help popup.)
-Searching for `println!` will search for a macro named `println`, just like
+Names in the search interface can be prefixed with an item type followed by a
+colon (such as `mod:`) to restrict the results to just that kind of item. Also,
+searching for `println!` will search for a macro named `println`, just like
 searching for `macro:println` does.
+Function signature searches can query generics, wrapped in angle brackets, and
+traits are normalized like types in the search engine. For example, a function
+with the signature `fn my_function<I: Iterator<Item=u32>>(input: I) -> usize`
+can be matched with the following queries:
+* `Iterator<u32> -> usize`
+* `trait:Iterator<primitive:u32> -> primitive:usize`
+* `Iterator -> usize`
 ### Changing displayed theme
 You can change the displayed theme by opening the settings menu (the gear

View file

@@ -354,12 +354,15 @@ function initSearch(rawSearchIndex) {
         if (isInGenerics) {
             parserState.genericsElems += 1;
         }
+        const typeFilter = parserState.typeFilter;
+        parserState.typeFilter = null;
         return {
             name: name,
             fullPath: pathSegments,
             pathWithoutLast: pathSegments.slice(0, pathSegments.length - 1),
             pathLast: pathSegments[pathSegments.length - 1],
             generics: generics,
+            typeFilter,
         };
     }
@@ -495,6 +498,11 @@ function initSearch(rawSearchIndex) {
     */
    function getItemsBefore(query, parserState, elems, endChar) {
        let foundStopChar = true;
+        let start = parserState.pos;
+        // If this is a generic, keep the outer item's type filter around.
+        const oldTypeFilter = parserState.typeFilter;
+        parserState.typeFilter = null;
        while (parserState.pos < parserState.length) {
            const c = parserState.userQuery[parserState.pos];
@@ -506,7 +514,25 @@ function initSearch(rawSearchIndex) {
                continue;
            } else if (c === ":" && isPathStart(parserState)) {
                throw ["Unexpected ", "::", ": paths cannot start with ", "::"];
-            } else if (c === ":" || isEndCharacter(c)) {
+            } else if (c === ":") {
+                if (parserState.typeFilter !== null) {
+                    throw ["Unexpected ", ":"];
+                }
+                if (elems.length === 0) {
+                    throw ["Expected type filter before ", ":"];
+                } else if (query.literalSearch) {
+                    throw ["You cannot use quotes on type filter"];
+                }
+                // The type filter doesn't count as an element since it's a modifier.
+                const typeFilterElem = elems.pop();
+                checkExtraTypeFilterCharacters(start, parserState);
+                parserState.typeFilter = typeFilterElem.name;
+                parserState.pos += 1;
+                parserState.totalElems -= 1;
+                query.literalSearch = false;
+                foundStopChar = true;
+                continue;
+            } else if (isEndCharacter(c)) {
                let extra = "";
                if (endChar === ">") {
                    extra = "<";
@@ -540,15 +566,10 @@ function initSearch(rawSearchIndex) {
                ];
            }
            const posBefore = parserState.pos;
+            start = parserState.pos;
            getNextElem(query, parserState, elems, endChar === ">");
-            if (endChar !== "") {
-                if (parserState.pos >= parserState.length) {
-                    throw ["Unclosed ", "<"];
-                }
-                const c2 = parserState.userQuery[parserState.pos];
-                if (!isSeparatorCharacter(c2) && c2 !== endChar) {
-                    throw ["Expected ", endChar, ", found ", c2];
-                }
+            if (endChar !== "" && parserState.pos >= parserState.length) {
+                throw ["Unclosed ", "<"];
            }
            // This case can be encountered if `getNextElem` encountered a "stop character" right
            // from the start. For example if you have `,,` or `<>`. In this case, we simply move up
@@ -564,6 +585,8 @@ function initSearch(rawSearchIndex) {
        // We are either at the end of the string or on the `endChar` character, let's move forward
        // in any case.
        parserState.pos += 1;
+        parserState.typeFilter = oldTypeFilter;
    }
    /**
@@ -572,10 +595,10 @@ function initSearch(rawSearchIndex) {
     *
     * @param {ParserState} parserState
     */
-    function checkExtraTypeFilterCharacters(parserState) {
+    function checkExtraTypeFilterCharacters(start, parserState) {
        const query = parserState.userQuery;
-        for (let pos = 0; pos < parserState.pos; ++pos) {
+        for (let pos = start; pos < parserState.pos; ++pos) {
            if (!isIdentCharacter(query[pos]) && !isWhitespaceCharacter(query[pos])) {
                throw ["Unexpected ", query[pos], " in type filter"];
            }
@@ -591,6 +614,7 @@ function initSearch(rawSearchIndex) {
     */
    function parseInput(query, parserState) {
        let foundStopChar = true;
+        let start = parserState.pos;
        while (parserState.pos < parserState.length) {
            const c = parserState.userQuery[parserState.pos];
@@ -612,16 +636,15 @@ function initSearch(rawSearchIndex) {
                }
                if (query.elems.length === 0) {
                    throw ["Expected type filter before ", ":"];
-                } else if (query.elems.length !== 1 || parserState.totalElems !== 1) {
-                    throw ["Unexpected ", ":"];
                } else if (query.literalSearch) {
                    throw ["You cannot use quotes on type filter"];
                }
-                checkExtraTypeFilterCharacters(parserState);
                // The type filter doesn't count as an element since it's a modifier.
-                parserState.typeFilter = query.elems.pop().name;
+                const typeFilterElem = query.elems.pop();
+                checkExtraTypeFilterCharacters(start, parserState);
+                parserState.typeFilter = typeFilterElem.name;
                parserState.pos += 1;
-                parserState.totalElems = 0;
+                parserState.totalElems -= 1;
                query.literalSearch = false;
                foundStopChar = true;
                continue;
@@ -653,6 +676,7 @@ function initSearch(rawSearchIndex) {
                ];
            }
            const before = query.elems.length;
+            start = parserState.pos;
            getNextElem(query, parserState, query.elems, false);
            if (query.elems.length === before) {
                // Nothing was added, weird... Let's increase the position to not remain stuck.
@@ -660,6 +684,9 @@ function initSearch(rawSearchIndex) {
            }
            foundStopChar = false;
        }
+        if (parserState.typeFilter !== null) {
+            throw ["Unexpected ", ":", " (expected path after type filter)"];
+        }
        while (parserState.pos < parserState.length) {
            if (isReturnArrow(parserState)) {
                parserState.pos += 2;
@@ -687,7 +714,6 @@ function initSearch(rawSearchIndex) {
        return {
            original: userQuery,
            userQuery: userQuery.toLowerCase(),
-            typeFilter: NO_TYPE_FILTER,
            elems: [],
            returned: [],
            // Total number of "top" elements (does not include generics).
@@ -738,8 +764,8 @@ function initSearch(rawSearchIndex) {
     *
     * ident = *(ALPHA / DIGIT / "_")
     * path = ident *(DOUBLE-COLON ident) [!]
-    * arg = path [generics]
-    * arg-without-generic = path
+    * arg = [type-filter *WS COLON *WS] path [generics]
+    * arg-without-generic = [type-filter *WS COLON *WS] path
     * type-sep = COMMA/WS *(COMMA/WS)
     * nonempty-arg-list = *(type-sep) arg *(type-sep arg) *(type-sep)
     * nonempty-arg-list-without-generics = *(type-sep) arg-without-generic
@@ -749,7 +775,7 @@ function initSearch(rawSearchIndex) {
     * return-args = RETURN-ARROW *(type-sep) nonempty-arg-list
     *
     * exact-search = [type-filter *WS COLON] [ RETURN-ARROW ] *WS QUOTE ident QUOTE [ generics ]
-    * type-search = [type-filter *WS COLON] [ nonempty-arg-list ] [ return-args ]
+    * type-search = [ nonempty-arg-list ] [ return-args ]
     *
     * query = *WS (exact-search / type-search) *WS
     *
@@ -798,6 +824,20 @@ function initSearch(rawSearchIndex) {
     * @return {ParsedQuery} - The parsed query
     */
    function parseQuery(userQuery) {
+        function convertTypeFilterOnElem(elem) {
+            if (elem.typeFilter !== null) {
+                let typeFilter = elem.typeFilter;
+                if (typeFilter === "const") {
+                    typeFilter = "constant";
+                }
+                elem.typeFilter = itemTypeFromName(typeFilter);
+            } else {
+                elem.typeFilter = NO_TYPE_FILTER;
+            }
+            for (const elem2 of elem.generics) {
+                convertTypeFilterOnElem(elem2);
+            }
+        }
        userQuery = userQuery.trim();
        const parserState = {
            length: userQuery.length,
@@ -812,17 +852,15 @@ function initSearch(rawSearchIndex) {
        try {
            parseInput(query, parserState);
-            if (parserState.typeFilter !== null) {
-                let typeFilter = parserState.typeFilter;
-                if (typeFilter === "const") {
-                    typeFilter = "constant";
-                }
-                query.typeFilter = itemTypeFromName(typeFilter);
+            for (const elem of query.elems) {
+                convertTypeFilterOnElem(elem);
+            }
+            for (const elem of query.returned) {
+                convertTypeFilterOnElem(elem);
            }
        } catch (err) {
            query = newParsedQuery(userQuery);
            query.error = err;
-            query.typeFilter = -1;
            return query;
        }
@@ -1057,12 +1095,10 @@ function initSearch(rawSearchIndex) {
            }
            // The names match, but we need to be sure that all generics kinda
            // match as well.
-            let elem_name;
            if (elem.generics.length > 0 && row.generics.length >= elem.generics.length) {
                const elems = Object.create(null);
                for (const entry of row.generics) {
-                    elem_name = entry.name;
-                    if (elem_name === "") {
+                    if (entry.name === "") {
                        // Pure generic, needs to check into it.
                        if (checkGenerics(entry, elem, maxEditDistance + 1, maxEditDistance)
                            !== 0) {
@@ -1070,19 +1106,19 @@ function initSearch(rawSearchIndex) {
                        }
                        continue;
                    }
-                    if (elems[elem_name] === undefined) {
-                        elems[elem_name] = 0;
+                    if (elems[entry.name] === undefined) {
+                        elems[entry.name] = [];
                    }
-                    elems[elem_name] += 1;
+                    elems[entry.name].push(entry.ty);
                }
                // We need to find the type that matches the most to remove it in order
                // to move forward.
-                for (const generic of elem.generics) {
+                const handleGeneric = generic => {
                    let match = null;
                    if (elems[generic.name]) {
                        match = generic.name;
                    } else {
-                        for (elem_name in elems) {
+                        for (const elem_name in elems) {
                            if (!hasOwnPropertyRustdoc(elems, elem_name)) {
                                continue;
                            }
@@ -1093,11 +1129,31 @@ function initSearch(rawSearchIndex) {
                    }
                    if (match === null) {
+                        return false;
+                    }
+                    const matchIdx = elems[match].findIndex(tmp_elem =>
+                        typePassesFilter(generic.typeFilter, tmp_elem));
+                    if (matchIdx === -1) {
+                        return false;
+                    }
+                    elems[match].splice(matchIdx, 1);
+                    if (elems[match].length === 0) {
+                        delete elems[match];
+                    }
+                    return true;
+                };
+                // To do the right thing with type filters, we first process generics
+                // that have them, removing matching ones from the "bag," then do the
+                // ones with no type filter, which can match any entry regardless of its
+                // own type.
+                for (const generic of elem.generics) {
+                    if (generic.typeFilter !== -1 && !handleGeneric(generic)) {
                        return maxEditDistance + 1;
                    }
-                    elems[match] -= 1;
-                    if (elems[match] === 0) {
-                        delete elems[match];
+                }
+                for (const generic of elem.generics) {
+                    if (generic.typeFilter === -1 && !handleGeneric(generic)) {
+                        return maxEditDistance + 1;
                    }
                }
                return 0;
return 0; return 0;
@ -1145,14 +1201,20 @@ function initSearch(rawSearchIndex) {
return maxEditDistance + 1; return maxEditDistance + 1;
} }
let dist = editDistance(row.name, elem.name, maxEditDistance); let dist;
if (typePassesFilter(elem.typeFilter, row.ty)) {
dist = editDistance(row.name, elem.name, maxEditDistance);
} else {
dist = maxEditDistance + 1;
}
if (literalSearch) { if (literalSearch) {
if (dist !== 0) { if (dist !== 0) {
// The name didn't match, let's try to check if the generics do. // The name didn't match, let's try to check if the generics do.
if (elem.generics.length === 0) { if (elem.generics.length === 0) {
const checkGeneric = row.generics.length > 0; const checkGeneric = row.generics.length > 0;
if (checkGeneric && row.generics if (checkGeneric && row.generics
.findIndex(tmp_elem => tmp_elem.name === elem.name) !== -1) { .findIndex(tmp_elem => tmp_elem.name === elem.name &&
typePassesFilter(elem.typeFilter, tmp_elem.ty)) !== -1) {
return 0; return 0;
} }
} }
@ -1201,22 +1263,21 @@ function initSearch(rawSearchIndex) {
* *
* @param {Row} row * @param {Row} row
* @param {QueryElement} elem - The element from the parsed query. * @param {QueryElement} elem - The element from the parsed query.
* @param {integer} typeFilter * @param {integer} maxEditDistance
* @param {Array<integer>} skipPositions - Do not return one of these positions. * @param {Array<integer>} skipPositions - Do not return one of these positions.
* *
* @return {dist: integer, position: integer} - Returns an edit distance to the best match. * @return {dist: integer, position: integer} - Returns an edit distance to the best match.
* If there is no match, returns * If there is no match, returns
* `maxEditDistance + 1` and position: -1. * `maxEditDistance + 1` and position: -1.
*/ */
function findArg(row, elem, typeFilter, maxEditDistance, skipPositions) { function findArg(row, elem, maxEditDistance, skipPositions) {
let dist = maxEditDistance + 1; let dist = maxEditDistance + 1;
let position = -1; let position = -1;
if (row && row.type && row.type.inputs && row.type.inputs.length > 0) { if (row && row.type && row.type.inputs && row.type.inputs.length > 0) {
let i = 0; let i = 0;
for (const input of row.type.inputs) { for (const input of row.type.inputs) {
if (!typePassesFilter(typeFilter, input.ty) || if (skipPositions.indexOf(i) !== -1) {
skipPositions.indexOf(i) !== -1) {
i += 1; i += 1;
continue; continue;
} }
@ -1245,14 +1306,14 @@ function initSearch(rawSearchIndex) {
* *
* @param {Row} row * @param {Row} row
* @param {QueryElement} elem - The element from the parsed query. * @param {QueryElement} elem - The element from the parsed query.
* @param {integer} typeFilter * @param {integer} maxEditDistance
* @param {Array<integer>} skipPositions - Do not return one of these positions. * @param {Array<integer>} skipPositions - Do not return one of these positions.
* *
* @return {dist: integer, position: integer} - Returns an edit distance to the best match. * @return {dist: integer, position: integer} - Returns an edit distance to the best match.
* If there is no match, returns * If there is no match, returns
* `maxEditDistance + 1` and position: -1. * `maxEditDistance + 1` and position: -1.
*/ */
function checkReturned(row, elem, typeFilter, maxEditDistance, skipPositions) { function checkReturned(row, elem, maxEditDistance, skipPositions) {
let dist = maxEditDistance + 1; let dist = maxEditDistance + 1;
let position = -1; let position = -1;
@ -1260,8 +1321,7 @@ function initSearch(rawSearchIndex) {
const ret = row.type.output; const ret = row.type.output;
let i = 0; let i = 0;
for (const ret_ty of ret) { for (const ret_ty of ret) {
if (!typePassesFilter(typeFilter, ret_ty.ty) || if (skipPositions.indexOf(i) !== -1) {
skipPositions.indexOf(i) !== -1) {
i += 1; i += 1;
continue; continue;
} }
@ -1483,15 +1543,15 @@ function initSearch(rawSearchIndex) {
const fullId = row.id; const fullId = row.id;
const searchWord = searchWords[pos]; const searchWord = searchWords[pos];
const in_args = findArg(row, elem, parsedQuery.typeFilter, maxEditDistance, []); const in_args = findArg(row, elem, maxEditDistance, []);
const returned = checkReturned(row, elem, parsedQuery.typeFilter, maxEditDistance, []); const returned = checkReturned(row, elem, maxEditDistance, []);
// path_dist is 0 because no parent path information is currently stored // path_dist is 0 because no parent path information is currently stored
// in the search index // in the search index
addIntoResults(results_in_args, fullId, pos, -1, in_args.dist, 0, maxEditDistance); addIntoResults(results_in_args, fullId, pos, -1, in_args.dist, 0, maxEditDistance);
addIntoResults(results_returned, fullId, pos, -1, returned.dist, 0, maxEditDistance); addIntoResults(results_returned, fullId, pos, -1, returned.dist, 0, maxEditDistance);
if (!typePassesFilter(parsedQuery.typeFilter, row.ty)) { if (!typePassesFilter(elem.typeFilter, row.ty)) {
return; return;
} }
@ -1568,7 +1628,6 @@ function initSearch(rawSearchIndex) {
const { dist, position } = callback( const { dist, position } = callback(
row, row,
elem, elem,
NO_TYPE_FILTER,
maxEditDistance, maxEditDistance,
skipPositions skipPositions
); );
@ -1632,7 +1691,6 @@ function initSearch(rawSearchIndex) {
in_returned = checkReturned( in_returned = checkReturned(
row, row,
elem, elem,
parsedQuery.typeFilter,
maxEditDistance, maxEditDistance,
[] []
); );

View file

@@ -79,11 +79,18 @@ function checkNeededFields(fullPath, expected, error_text, queryName, position)
            "foundElems",
            "original",
            "returned",
-            "typeFilter",
            "userQuery",
            "error",
        ];
-    } else if (fullPath.endsWith("elems") || fullPath.endsWith("generics")) {
+    } else if (fullPath.endsWith("elems") || fullPath.endsWith("returned")) {
+        fieldsToCheck = [
+            "name",
+            "fullPath",
+            "pathWithoutLast",
+            "pathLast",
+            "generics",
+        ];
+    } else if (fullPath.endsWith("generics")) {
        fieldsToCheck = [
            "name",
            "fullPath",

View file

@@ -1,6 +1,7 @@
 // assembly-output: emit-asm
 // min-llvm-version: 15.0
 // only-x86_64
+// ignore-sgx
 // revisions: opt-speed opt-size
 // [opt-speed] compile-flags: -Copt-level=1
 // [opt-size] compile-flags: -Copt-level=s

View file

@@ -1,6 +1,7 @@
 // assembly-output: emit-asm
 // compile-flags: -Copt-level=1
 // only-x86_64
+// ignore-sgx
 // min-llvm-version: 15.0
 #![crate_type = "rlib"]

View file

@@ -4,6 +4,7 @@
 // assembly-output: emit-asm
 // compile-flags: --crate-type=lib -O -C llvm-args=-x86-asm-syntax=intel
 // only-x86_64
+// ignore-sgx
 // CHECK-LABEL: clamp_demo:
 #[no_mangle]

View file

@@ -11,7 +11,7 @@ pub extern fn plus_one(r: &mut u64) {
 // CHECK: plus_one
 // CHECK: lfence
-// CHECK-NEXT: addq
+// CHECK-NEXT: incq
 // CHECK: popq [[REGISTER:%[a-z]+]]
 // CHECK-NEXT: lfence
 // CHECK-NEXT: jmpq *[[REGISTER]]

View file

@@ -10,9 +10,7 @@ use std::arch::asm;
 pub extern "C" fn get(ptr: *const u64) -> u64 {
     let value: u64;
     unsafe {
-        asm!(".start_inline_asm:",
-            "mov {}, [{}]",
-            ".end_inline_asm:",
+        asm!("mov {}, [{}]",
             out(reg) value,
             in(reg) ptr);
     }
@@ -20,24 +18,17 @@ pub extern "C" fn get(ptr: *const u64) -> u64 {
 }
 // CHECK: get
-// CHECK: .start_inline_asm
-// CHECK-NEXT: movq
+// CHECK: movq
 // CHECK-NEXT: lfence
-// CHECK-NEXT: .end_inline_asm
 #[no_mangle]
 pub extern "C" fn myret() {
     unsafe {
-        asm!(
-            ".start_myret_inline_asm:",
-            "ret",
-            ".end_myret_inline_asm:",
-        );
+        asm!("ret");
     }
 }
 // CHECK: myret
-// CHECK: .start_myret_inline_asm
-// CHECK-NEXT: shlq $0, (%rsp)
+// CHECK: shlq $0, (%rsp)
 // CHECK-NEXT: lfence
 // CHECK-NEXT: retq

View file

@@ -6,6 +6,7 @@
 // compile-flags: -O
 // [set] compile-flags: -Zno-jump-tables
 // only-x86_64
+// ignore-sgx
 #![crate_type = "lib"]

View file

@@ -4,6 +4,7 @@
 # ignore-nvptx64-nvidia-cuda FIXME: can't find crate for `std`
 # ignore-musl FIXME: this makefile needs teaching how to use a musl toolchain
 # (see dist-i586-gnu-i586-i686-musl Dockerfile)
+# ignore-sgx
 include ../../run-make-fulldeps/tools.mk

View file

@@ -1,8 +1,7 @@
 CHECK: cc_plus_one_asm
 CHECK-NEXT: movl
 CHECK-NEXT: lfence
-CHECK-NEXT: inc
-CHECK-NEXT: notq (%rsp)
-CHECK-NEXT: notq (%rsp)
+CHECK-NEXT: incl
+CHECK-NEXT: shlq $0, (%rsp)
 CHECK-NEXT: lfence
 CHECK-NEXT: retq

View file

@@ -1,8 +1,24 @@
-CHECK: libunwind::Registers_x86_64::jumpto
+CHECK: __libunwind_Registers_x86_64_jumpto
 CHECK: lfence
 CHECK: lfence
 CHECK: lfence
 CHECK: lfence
-CHECK: shlq $0, (%rsp)
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK: lfence
+CHECK-NEXT: popq [[REGISTER:%[a-z]+]]
 CHECK-NEXT: lfence
-CHECK-NEXT: retq
+CHECK-NEXT: popq [[REGISTER:%[a-z]+]]
+CHECK-NEXT: lfence
+CHECK-NEXT: jmpq *[[REGISTER]]

View file

@@ -2,6 +2,5 @@ CHECK: print
 CHECK: lfence
 CHECK: lfence
 CHECK: lfence
-CHECK: popq
 CHECK: callq 0x{{[[:xdigit:]]*}} <_Unwind_Resume>
 CHECK-NEXT: ud2

View file

@@ -20,39 +20,38 @@ function build {
 }
 function check {
-    local func=$1
+    local func_re="$1"
     local checks="${TEST_DIR}/$2"
     local asm=$(mktemp)
-    local objdump="${BUILD_DIR}/x86_64-unknown-linux-gnu/llvm/build/bin/llvm-objdump"
-    local filecheck="${BUILD_DIR}/x86_64-unknown-linux-gnu/llvm/build/bin/FileCheck"
+    local objdump="${LLVM_BIN_DIR}/llvm-objdump"
+    local filecheck="${LLVM_BIN_DIR}/FileCheck"
+    local enclave=${WORK_DIR}/enclave/target/x86_64-fortanix-unknown-sgx/debug/enclave
-    ${objdump} --disassemble-symbols=${func} --demangle \
-      ${WORK_DIR}/enclave/target/x86_64-fortanix-unknown-sgx/debug/enclave > ${asm}
+    func="$(${objdump} --syms --demangle ${enclave} | \
+        grep --only-matching -E "[[:blank:]]+${func_re}\$" | \
+        sed -e 's/^[[:space:]]*//' )"
+    ${objdump} --disassemble-symbols="${func}" --demangle \
+      ${enclave} > ${asm}
     ${filecheck} --input-file ${asm} ${checks}
 }
 build
-check unw_getcontext unw_getcontext.checks
-check "libunwind::Registers_x86_64::jumpto()" jumpto.checks
-check "std::io::stdio::_print::h87f0c238421c45bc" print.checks
-check rust_plus_one_global_asm rust_plus_one_global_asm.checks \
-    || echo "warning: module level assembly currently not hardened"
+check "unw_getcontext" unw_getcontext.checks
+check "__libunwind_Registers_x86_64_jumpto" jumpto.checks
+check 'std::io::stdio::_print::[[:alnum:]]+' print.checks
+check rust_plus_one_global_asm rust_plus_one_global_asm.checks
 check cc_plus_one_c cc_plus_one_c.checks
 check cc_plus_one_c_asm cc_plus_one_c_asm.checks
 check cc_plus_one_cxx cc_plus_one_cxx.checks
 check cc_plus_one_cxx_asm cc_plus_one_cxx_asm.checks
-check cc_plus_one_asm cc_plus_one_asm.checks \
-    || echo "warning: the cc crate forwards assembly files to the CC compiler." \
-        "Clang uses its own integrated assembler, which does not include the LVI passes."
+check cc_plus_one_asm cc_plus_one_asm.checks
 check cmake_plus_one_c cmake_plus_one_c.checks
 check cmake_plus_one_c_asm cmake_plus_one_c_asm.checks
-check cmake_plus_one_c_global_asm cmake_plus_one_c_global_asm.checks \
-    || echo "warning: module level assembly currently not hardened"
+check cmake_plus_one_c_global_asm cmake_plus_one_c_global_asm.checks
 check cmake_plus_one_cxx cmake_plus_one_cxx.checks
 check cmake_plus_one_cxx_asm cmake_plus_one_cxx_asm.checks
-check cmake_plus_one_cxx_global_asm cmake_plus_one_cxx_global_asm.checks \
-    || echo "warning: module level assembly currently not hardened"
+check cmake_plus_one_cxx_global_asm cmake_plus_one_cxx_global_asm.checks
 check cmake_plus_one_asm cmake_plus_one_asm.checks

View file

@ -17,6 +17,7 @@ const QUERY = [
"a b:", "a b:",
"a (b:", "a (b:",
"_:", "_:",
"_:a",
"a-bb", "a-bb",
"a>bb", "a>bb",
"ab'", "ab'",
@ -48,7 +49,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "<P>", original: "<P>",
returned: [], returned: [],
typeFilter: -1,
userQuery: "<p>", userQuery: "<p>",
error: "Found generics without a path", error: "Found generics without a path",
}, },
@ -57,7 +57,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "-> <P>", original: "-> <P>",
returned: [], returned: [],
typeFilter: -1,
userQuery: "-> <p>", userQuery: "-> <p>",
error: "Found generics without a path", error: "Found generics without a path",
}, },
@ -66,7 +65,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a<\"P\">", original: "a<\"P\">",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a<\"p\">", userQuery: "a<\"p\">",
error: "Unexpected `\"` in generics", error: "Unexpected `\"` in generics",
}, },
@ -75,7 +73,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "\"P\" \"P\"", original: "\"P\" \"P\"",
returned: [], returned: [],
typeFilter: -1,
userQuery: "\"p\" \"p\"", userQuery: "\"p\" \"p\"",
error: "Cannot have more than one literal search element", error: "Cannot have more than one literal search element",
}, },
@ -84,7 +81,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "P \"P\"", original: "P \"P\"",
returned: [], returned: [],
typeFilter: -1,
userQuery: "p \"p\"", userQuery: "p \"p\"",
error: "Cannot use literal search when there is more than one element", error: "Cannot use literal search when there is more than one element",
}, },
@ -93,7 +89,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "\"p\" p", original: "\"p\" p",
returned: [], returned: [],
typeFilter: -1,
userQuery: "\"p\" p", userQuery: "\"p\" p",
error: "You cannot have more than one element if you use quotes", error: "You cannot have more than one element if you use quotes",
}, },
@ -102,7 +97,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "\"const\": p", original: "\"const\": p",
returned: [], returned: [],
typeFilter: -1,
userQuery: "\"const\": p", userQuery: "\"const\": p",
error: "You cannot use quotes on type filter", error: "You cannot use quotes on type filter",
}, },
@@ -111,16 +105,14 @@ const PARSED = [
         foundElems: 0,
         original: "a<:a>",
         returned: [],
-        typeFilter: -1,
         userQuery: "a<:a>",
-        error: "Unexpected `:` after `<`",
+        error: "Expected type filter before `:`",
     },
     {
         elems: [],
         foundElems: 0,
         original: "a<::a>",
         returned: [],
-        typeFilter: -1,
         userQuery: "a<::a>",
         error: "Unexpected `::`: paths cannot start with `::`",
     },
@ -129,7 +121,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "((a))", original: "((a))",
returned: [], returned: [],
typeFilter: -1,
userQuery: "((a))", userQuery: "((a))",
error: "Unexpected `(`", error: "Unexpected `(`",
}, },
@ -138,7 +129,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "(p -> p", original: "(p -> p",
returned: [], returned: [],
typeFilter: -1,
userQuery: "(p -> p", userQuery: "(p -> p",
error: "Unexpected `(`", error: "Unexpected `(`",
}, },
@ -147,7 +137,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "::a::b", original: "::a::b",
returned: [], returned: [],
typeFilter: -1,
userQuery: "::a::b", userQuery: "::a::b",
error: "Paths cannot start with `::`", error: "Paths cannot start with `::`",
}, },
@ -156,7 +145,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a::::b", original: "a::::b",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a::::b", userQuery: "a::::b",
error: "Unexpected `::::`", error: "Unexpected `::::`",
}, },
@ -165,7 +153,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a::b::", original: "a::b::",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a::b::", userQuery: "a::b::",
error: "Paths cannot end with `::`", error: "Paths cannot end with `::`",
}, },
@ -174,7 +161,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: ":a", original: ":a",
returned: [], returned: [],
typeFilter: -1,
userQuery: ":a", userQuery: ":a",
error: "Expected type filter before `:`", error: "Expected type filter before `:`",
}, },
@@ -183,16 +169,14 @@ const PARSED = [
         foundElems: 0,
         original: "a b:",
         returned: [],
-        typeFilter: -1,
         userQuery: "a b:",
-        error: "Unexpected `:`",
+        error: "Unexpected `:` (expected path after type filter)",
     },
     {
         elems: [],
         foundElems: 0,
         original: "a (b:",
         returned: [],
-        typeFilter: -1,
         userQuery: "a (b:",
         error: "Unexpected `(`",
     },
@@ -201,8 +185,15 @@ const PARSED = [
         foundElems: 0,
         original: "_:",
         returned: [],
-        typeFilter: -1,
         userQuery: "_:",
+        error: "Unexpected `:` (expected path after type filter)",
+    },
+    {
+        elems: [],
+        foundElems: 0,
+        original: "_:a",
+        returned: [],
+        userQuery: "_:a",
         error: "Unknown type filter `_`",
     },
     {
@ -210,7 +201,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a-bb", original: "a-bb",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a-bb", userQuery: "a-bb",
error: "Unexpected `-` (did you mean `->`?)", error: "Unexpected `-` (did you mean `->`?)",
}, },
@ -219,7 +209,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a>bb", original: "a>bb",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a>bb", userQuery: "a>bb",
error: "Unexpected `>` (did you mean `->`?)", error: "Unexpected `>` (did you mean `->`?)",
}, },
@ -228,7 +217,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "ab'", original: "ab'",
returned: [], returned: [],
typeFilter: -1,
userQuery: "ab'", userQuery: "ab'",
error: "Unexpected `'`", error: "Unexpected `'`",
}, },
@ -237,7 +225,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a->", original: "a->",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a->", userQuery: "a->",
error: "Expected at least one item after `->`", error: "Expected at least one item after `->`",
}, },
@ -246,7 +233,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: '"p" <a>', original: '"p" <a>',
returned: [], returned: [],
typeFilter: -1,
userQuery: '"p" <a>', userQuery: '"p" <a>',
error: "Found generics without a path", error: "Found generics without a path",
}, },
@ -255,7 +241,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: '"p" a<a>', original: '"p" a<a>',
returned: [], returned: [],
typeFilter: -1,
userQuery: '"p" a<a>', userQuery: '"p" a<a>',
error: "You cannot have more than one element if you use quotes", error: "You cannot have more than one element if you use quotes",
}, },
@ -264,7 +249,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: 'a,<', original: 'a,<',
returned: [], returned: [],
typeFilter: -1,
userQuery: 'a,<', userQuery: 'a,<',
error: 'Found generics without a path', error: 'Found generics without a path',
}, },
@ -273,7 +257,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: 'aaaaa<>b', original: 'aaaaa<>b',
returned: [], returned: [],
typeFilter: -1,
userQuery: 'aaaaa<>b', userQuery: 'aaaaa<>b',
error: 'Expected `,`, ` `, `:` or `->`, found `b`', error: 'Expected `,`, ` `, `:` or `->`, found `b`',
}, },
@@ -282,16 +265,14 @@ const PARSED = [
         foundElems: 0,
         original: 'fn:aaaaa<>b',
         returned: [],
-        typeFilter: -1,
         userQuery: 'fn:aaaaa<>b',
-        error: 'Expected `,`, ` ` or `->`, found `b`',
+        error: 'Expected `,`, ` `, `:` or `->`, found `b`',
     },
     {
         elems: [],
         foundElems: 0,
         original: '->a<>b',
         returned: [],
-        typeFilter: -1,
         userQuery: '->a<>b',
         error: 'Expected `,` or ` `, found `b`',
     },
@ -300,7 +281,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: 'a<->', original: 'a<->',
returned: [], returned: [],
typeFilter: -1,
userQuery: 'a<->', userQuery: 'a<->',
error: 'Unexpected `-` after `<`', error: 'Unexpected `-` after `<`',
}, },
@ -309,7 +289,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: 'a:: a', original: 'a:: a',
returned: [], returned: [],
typeFilter: -1,
userQuery: 'a:: a', userQuery: 'a:: a',
error: 'Paths cannot end with `::`', error: 'Paths cannot end with `::`',
}, },
@ -318,7 +297,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: 'a ::a', original: 'a ::a',
returned: [], returned: [],
typeFilter: -1,
userQuery: 'a ::a', userQuery: 'a ::a',
error: 'Paths cannot start with `::`', error: 'Paths cannot start with `::`',
}, },
@@ -327,16 +305,14 @@ const PARSED = [
         foundElems: 0,
         original: "a<a>:",
         returned: [],
-        typeFilter: -1,
         userQuery: "a<a>:",
-        error: 'Unexpected `:`',
+        error: 'Unexpected `<` in type filter',
     },
     {
         elems: [],
         foundElems: 0,
         original: "a<>:",
         returned: [],
-        typeFilter: -1,
         userQuery: "a<>:",
         error: 'Unexpected `<` in type filter',
     },
@ -345,7 +321,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a,:", original: "a,:",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a,:", userQuery: "a,:",
error: 'Unexpected `,` in type filter', error: 'Unexpected `,` in type filter',
}, },
@ -354,7 +329,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a<> :", original: "a<> :",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a<> :", userQuery: "a<> :",
error: 'Unexpected `<` in type filter', error: 'Unexpected `<` in type filter',
}, },
@ -363,7 +337,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "mod : :", original: "mod : :",
returned: [], returned: [],
typeFilter: -1,
userQuery: "mod : :", userQuery: "mod : :",
error: 'Unexpected `:`', error: 'Unexpected `:`',
}, },
@ -372,7 +345,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a!a", original: "a!a",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a!a", userQuery: "a!a",
error: 'Unexpected `!`: it can only be at the end of an ident', error: 'Unexpected `!`: it can only be at the end of an ident',
}, },
@ -381,7 +353,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a!!", original: "a!!",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a!!", userQuery: "a!!",
error: 'Cannot have more than one `!` in an ident', error: 'Cannot have more than one `!` in an ident',
}, },
@ -390,7 +361,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "mod:a!", original: "mod:a!",
returned: [], returned: [],
typeFilter: -1,
userQuery: "mod:a!", userQuery: "mod:a!",
error: 'Invalid search type: macro `!` and `mod` both specified', error: 'Invalid search type: macro `!` and `mod` both specified',
}, },
@ -399,7 +369,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a!::a", original: "a!::a",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a!::a", userQuery: "a!::a",
error: 'Cannot have associated items in macros', error: 'Cannot have associated items in macros',
}, },
@ -408,7 +377,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a<", original: "a<",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a<", userQuery: "a<",
error: "Unclosed `<`", error: "Unclosed `<`",
}, },

View file

@@ -1,4 +1,14 @@
-const QUERY = ['fn:foo', 'enum : foo', 'macro<f>:foo', 'macro!', 'macro:mac!', 'a::mac!'];
+const QUERY = [
+    'fn:foo',
+    'enum : foo',
+    'macro<f>:foo',
+    'macro!',
+    'macro:mac!',
+    'a::mac!',
+    '-> fn:foo',
+    '-> fn:foo<fn:bar>',
+    '-> fn:foo<fn:bar, enum : baz::fuzz>',
+];

 const PARSED = [
     {
@ -8,11 +18,11 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "foo", pathLast: "foo",
generics: [], generics: [],
typeFilter: 5,
}], }],
foundElems: 1, foundElems: 1,
original: "fn:foo", original: "fn:foo",
returned: [], returned: [],
typeFilter: 5,
userQuery: "fn:foo", userQuery: "fn:foo",
error: null, error: null,
}, },
@ -23,11 +33,11 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "foo", pathLast: "foo",
generics: [], generics: [],
typeFilter: 4,
}], }],
foundElems: 1, foundElems: 1,
original: "enum : foo", original: "enum : foo",
returned: [], returned: [],
typeFilter: 4,
userQuery: "enum : foo", userQuery: "enum : foo",
error: null, error: null,
}, },
@@ -36,9 +46,8 @@ const PARSED = [
         foundElems: 0,
         original: "macro<f>:foo",
         returned: [],
-        typeFilter: -1,
         userQuery: "macro<f>:foo",
-        error: "Unexpected `:`",
+        error: "Unexpected `<` in type filter",
     },
     {
         elems: [{
@ -47,11 +56,11 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "macro", pathLast: "macro",
generics: [], generics: [],
typeFilter: 14,
}], }],
foundElems: 1, foundElems: 1,
original: "macro!", original: "macro!",
returned: [], returned: [],
typeFilter: 14,
userQuery: "macro!", userQuery: "macro!",
error: null, error: null,
}, },
@ -62,11 +71,11 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "mac", pathLast: "mac",
generics: [], generics: [],
typeFilter: 14,
}], }],
foundElems: 1, foundElems: 1,
original: "macro:mac!", original: "macro:mac!",
returned: [], returned: [],
typeFilter: 14,
userQuery: "macro:mac!", userQuery: "macro:mac!",
error: null, error: null,
}, },
@ -77,12 +86,83 @@ const PARSED = [
pathWithoutLast: ["a"], pathWithoutLast: ["a"],
pathLast: "mac", pathLast: "mac",
generics: [], generics: [],
typeFilter: 14,
}], }],
foundElems: 1, foundElems: 1,
original: "a::mac!", original: "a::mac!",
returned: [], returned: [],
typeFilter: 14,
userQuery: "a::mac!", userQuery: "a::mac!",
error: null, error: null,
}, },
{
elems: [],
foundElems: 1,
original: "-> fn:foo",
returned: [{
name: "foo",
fullPath: ["foo"],
pathWithoutLast: [],
pathLast: "foo",
generics: [],
typeFilter: 5,
}],
userQuery: "-> fn:foo",
error: null,
},
{
elems: [],
foundElems: 1,
original: "-> fn:foo<fn:bar>",
returned: [{
name: "foo",
fullPath: ["foo"],
pathWithoutLast: [],
pathLast: "foo",
generics: [
{
name: "bar",
fullPath: ["bar"],
pathWithoutLast: [],
pathLast: "bar",
generics: [],
typeFilter: 5,
}
],
typeFilter: 5,
}],
userQuery: "-> fn:foo<fn:bar>",
error: null,
},
{
elems: [],
foundElems: 1,
original: "-> fn:foo<fn:bar, enum : baz::fuzz>",
returned: [{
name: "foo",
fullPath: ["foo"],
pathWithoutLast: [],
pathLast: "foo",
generics: [
{
name: "bar",
fullPath: ["bar"],
pathWithoutLast: [],
pathLast: "bar",
generics: [],
typeFilter: 5,
},
{
name: "baz::fuzz",
fullPath: ["baz", "fuzz"],
pathWithoutLast: ["baz"],
pathLast: "fuzz",
generics: [],
typeFilter: 4,
},
],
typeFilter: 5,
}],
userQuery: "-> fn:foo<fn:bar, enum : baz::fuzz>",
error: null,
},
]; ];

View file

@ -6,7 +6,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: 'A<B<C<D>, E>', original: 'A<B<C<D>, E>',
returned: [], returned: [],
typeFilter: -1,
userQuery: 'a<b<c<d>, e>', userQuery: 'a<b<c<d>, e>',
error: 'Unexpected `<` after `<`', error: 'Unexpected `<` after `<`',
}, },
@ -18,6 +17,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "p", pathLast: "p",
generics: [], generics: [],
typeFilter: -1,
}, },
{ {
name: "u8", name: "u8",
@ -25,12 +25,12 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "u8", pathLast: "u8",
generics: [], generics: [],
typeFilter: -1,
}, },
], ],
foundElems: 2, foundElems: 2,
original: "p<> u8", original: "p<> u8",
returned: [], returned: [],
typeFilter: -1,
userQuery: "p<> u8", userQuery: "p<> u8",
error: null, error: null,
}, },
@ -50,12 +50,12 @@ const PARSED = [
generics: [], generics: [],
}, },
], ],
typeFilter: -1,
}, },
], ],
foundElems: 1, foundElems: 1,
original: '"p"<a>', original: '"p"<a>',
returned: [], returned: [],
typeFilter: -1,
userQuery: '"p"<a>', userQuery: '"p"<a>',
error: null, error: null,
}, },

View file

@ -23,11 +23,11 @@ const PARSED = [
generics: [], generics: [],
}, },
], ],
typeFilter: -1,
}], }],
foundElems: 1, foundElems: 1,
original: "R<!>", original: "R<!>",
returned: [], returned: [],
typeFilter: -1,
userQuery: "r<!>", userQuery: "r<!>",
error: null, error: null,
}, },
@ -38,11 +38,11 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "!", pathLast: "!",
generics: [], generics: [],
typeFilter: -1,
}], }],
foundElems: 1, foundElems: 1,
original: "!", original: "!",
returned: [], returned: [],
typeFilter: -1,
userQuery: "!", userQuery: "!",
error: null, error: null,
}, },
@ -53,11 +53,11 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "a", pathLast: "a",
generics: [], generics: [],
typeFilter: 14,
}], }],
foundElems: 1, foundElems: 1,
original: "a!", original: "a!",
returned: [], returned: [],
typeFilter: 14,
userQuery: "a!", userQuery: "a!",
error: null, error: null,
}, },
@ -66,7 +66,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a!::b", original: "a!::b",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a!::b", userQuery: "a!::b",
error: "Cannot have associated items in macros", error: "Cannot have associated items in macros",
}, },
@ -77,11 +76,11 @@ const PARSED = [
pathWithoutLast: ["!"], pathWithoutLast: ["!"],
pathLast: "b", pathLast: "b",
generics: [], generics: [],
typeFilter: -1,
}], }],
foundElems: 1, foundElems: 1,
original: "!::b", original: "!::b",
returned: [], returned: [],
typeFilter: -1,
userQuery: "!::b", userQuery: "!::b",
error: null, error: null,
}, },
@ -90,7 +89,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a!::b!", original: "a!::b!",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a!::b!", userQuery: "a!::b!",
error: "Cannot have associated items in macros", error: "Cannot have associated items in macros",
}, },

View file

@ -16,11 +16,11 @@ const PARSED = [
generics: [], generics: [],
}, },
], ],
typeFilter: -1,
}], }],
foundElems: 1, foundElems: 1,
original: "R<P>", original: "R<P>",
returned: [], returned: [],
typeFilter: -1,
userQuery: "r<p>", userQuery: "r<p>",
error: null, error: null,
} }

View file

@ -8,11 +8,11 @@ const PARSED = [
pathWithoutLast: ["a"], pathWithoutLast: ["a"],
pathLast: "b", pathLast: "b",
generics: [], generics: [],
typeFilter: -1,
}], }],
foundElems: 1, foundElems: 1,
original: "A::B", original: "A::B",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a::b", userQuery: "a::b",
error: null, error: null,
}, },
@ -24,6 +24,7 @@ const PARSED = [
pathWithoutLast: ["a"], pathWithoutLast: ["a"],
pathLast: "b", pathLast: "b",
generics: [], generics: [],
typeFilter: -1,
}, },
{ {
name: "c", name: "c",
@ -31,12 +32,12 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "c", pathLast: "c",
generics: [], generics: [],
typeFilter: -1,
}, },
], ],
foundElems: 2, foundElems: 2,
original: 'A::B,C', original: 'A::B,C',
returned: [], returned: [],
typeFilter: -1,
userQuery: 'a::b,c', userQuery: 'a::b,c',
error: null, error: null,
}, },
@ -56,6 +57,7 @@ const PARSED = [
generics: [], generics: [],
}, },
], ],
typeFilter: -1,
}, },
{ {
name: "c", name: "c",
@ -63,12 +65,12 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "c", pathLast: "c",
generics: [], generics: [],
typeFilter: -1,
}, },
], ],
foundElems: 2, foundElems: 2,
original: 'A::B<f>,C', original: 'A::B<f>,C',
returned: [], returned: [],
typeFilter: -1,
userQuery: 'a::b<f>,c', userQuery: 'a::b<f>,c',
error: null, error: null,
}, },
@ -79,11 +81,11 @@ const PARSED = [
pathWithoutLast: ["mod"], pathWithoutLast: ["mod"],
pathLast: "a", pathLast: "a",
generics: [], generics: [],
typeFilter: -1,
}], }],
foundElems: 1, foundElems: 1,
original: "mod::a", original: "mod::a",
returned: [], returned: [],
typeFilter: -1,
userQuery: "mod::a", userQuery: "mod::a",
error: null, error: null,
}, },

View file

@ -19,8 +19,8 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "p", pathLast: "p",
generics: [], generics: [],
typeFilter: -1,
}], }],
typeFilter: -1,
userQuery: '-> "p"', userQuery: '-> "p"',
error: null, error: null,
}, },
@ -31,11 +31,11 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "p", pathLast: "p",
generics: [], generics: [],
typeFilter: -1,
}], }],
foundElems: 1, foundElems: 1,
original: '"p",', original: '"p",',
returned: [], returned: [],
typeFilter: -1,
userQuery: '"p",', userQuery: '"p",',
error: null, error: null,
}, },
@ -44,7 +44,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: '"p" -> a', original: '"p" -> a',
returned: [], returned: [],
typeFilter: -1,
userQuery: '"p" -> a', userQuery: '"p" -> a',
error: "You cannot have more than one element if you use quotes", error: "You cannot have more than one element if you use quotes",
}, },
@ -53,7 +52,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: '"a" -> "p"', original: '"a" -> "p"',
returned: [], returned: [],
typeFilter: -1,
userQuery: '"a" -> "p"', userQuery: '"a" -> "p"',
error: "Cannot have more than one literal search element", error: "Cannot have more than one literal search element",
}, },
@ -62,7 +60,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: '->"-"', original: '->"-"',
returned: [], returned: [],
typeFilter: -1,
userQuery: '->"-"', userQuery: '->"-"',
error: 'Unexpected `-` in a string element', error: 'Unexpected `-` in a string element',
}, },
@ -71,7 +68,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: '"a', original: '"a',
returned: [], returned: [],
typeFilter: -1,
userQuery: '"a', userQuery: '"a',
error: 'Unclosed `"`', error: 'Unclosed `"`',
}, },
@ -80,7 +76,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: '""', original: '""',
returned: [], returned: [],
typeFilter: -1,
userQuery: '""', userQuery: '""',
error: 'Cannot have empty string element', error: 'Cannot have empty string element',
}, },

View file

@ -25,8 +25,8 @@ const PARSED = [
generics: [], generics: [],
}, },
], ],
typeFilter: -1,
}], }],
typeFilter: -1,
userQuery: "-> f<p>", userQuery: "-> f<p>",
error: null, error: null,
}, },
@ -40,8 +40,8 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "p", pathLast: "p",
generics: [], generics: [],
typeFilter: -1,
}], }],
typeFilter: -1,
userQuery: "-> p", userQuery: "-> p",
error: null, error: null,
}, },
@ -55,8 +55,8 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "a", pathLast: "a",
generics: [], generics: [],
typeFilter: -1,
}], }],
typeFilter: -1,
userQuery: "->,a", userQuery: "->,a",
error: null, error: null,
}, },
@ -67,6 +67,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "aaaaa", pathLast: "aaaaa",
generics: [], generics: [],
typeFilter: -1,
}], }],
foundElems: 2, foundElems: 2,
original: "aaaaa->a", original: "aaaaa->a",
@ -76,8 +77,8 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "a", pathLast: "a",
generics: [], generics: [],
typeFilter: -1,
}], }],
typeFilter: -1,
userQuery: "aaaaa->a", userQuery: "aaaaa->a",
error: null, error: null,
}, },
@ -91,8 +92,8 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "!", pathLast: "!",
generics: [], generics: [],
typeFilter: -1,
}], }],
typeFilter: -1,
userQuery: "-> !", userQuery: "-> !",
error: null, error: null,
}, },

View file

@ -19,6 +19,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: 'aaaaaa', pathLast: 'aaaaaa',
generics: [], generics: [],
typeFilter: -1,
}, },
{ {
name: 'b', name: 'b',
@ -26,12 +27,12 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: 'b', pathLast: 'b',
generics: [], generics: [],
typeFilter: -1,
}, },
], ],
foundElems: 2, foundElems: 2,
original: "aaaaaa b", original: "aaaaaa b",
returned: [], returned: [],
typeFilter: -1,
userQuery: "aaaaaa b", userQuery: "aaaaaa b",
error: null, error: null,
}, },
@ -43,6 +44,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: 'a', pathLast: 'a',
generics: [], generics: [],
typeFilter: -1,
}, },
{ {
name: 'b', name: 'b',
@ -50,12 +52,12 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: 'b', pathLast: 'b',
generics: [], generics: [],
typeFilter: -1,
}, },
], ],
foundElems: 2, foundElems: 2,
original: "a b", original: "a b",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a b", userQuery: "a b",
error: null, error: null,
}, },
@ -67,6 +69,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: 'a', pathLast: 'a',
generics: [], generics: [],
typeFilter: -1,
}, },
{ {
name: 'b', name: 'b',
@ -74,12 +77,12 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: 'b', pathLast: 'b',
generics: [], generics: [],
typeFilter: -1,
}, },
], ],
foundElems: 2, foundElems: 2,
original: "a,b", original: "a,b",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a,b", userQuery: "a,b",
error: null, error: null,
}, },
@ -91,6 +94,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: 'a', pathLast: 'a',
generics: [], generics: [],
typeFilter: -1,
}, },
{ {
name: 'b', name: 'b',
@ -98,12 +102,12 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: 'b', pathLast: 'b',
generics: [], generics: [],
typeFilter: -1,
}, },
], ],
foundElems: 2, foundElems: 2,
original: "a\tb", original: "a\tb",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a\tb", userQuery: "a\tb",
error: null, error: null,
}, },
@ -130,12 +134,12 @@ const PARSED = [
generics: [], generics: [],
}, },
], ],
typeFilter: -1,
}, },
], ],
foundElems: 1, foundElems: 1,
original: "a<b c>", original: "a<b c>",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a<b c>", userQuery: "a<b c>",
error: null, error: null,
}, },
@ -162,12 +166,12 @@ const PARSED = [
generics: [], generics: [],
}, },
], ],
typeFilter: -1,
}, },
], ],
foundElems: 1, foundElems: 1,
original: "a<b,c>", original: "a<b,c>",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a<b,c>", userQuery: "a<b,c>",
error: null, error: null,
}, },
@ -194,12 +198,12 @@ const PARSED = [
generics: [], generics: [],
}, },
], ],
typeFilter: -1,
}, },
], ],
foundElems: 1, foundElems: 1,
original: "a<b\tc>", original: "a<b\tc>",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a<b\tc>", userQuery: "a<b\tc>",
error: null, error: null,
}, },

View file

@ -20,6 +20,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "a", pathLast: "a",
generics: [], generics: [],
typeFilter: -1,
}, },
{ {
name: "b", name: "b",
@ -27,12 +28,12 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "b", pathLast: "b",
generics: [], generics: [],
typeFilter: -1,
}, },
], ],
foundElems: 2, foundElems: 2,
original: "a b", original: "a b",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a b", userQuery: "a b",
error: null, error: null,
}, },
@ -44,6 +45,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "a", pathLast: "a",
generics: [], generics: [],
typeFilter: -1,
}, },
{ {
name: "b", name: "b",
@ -51,12 +53,12 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "b", pathLast: "b",
generics: [], generics: [],
typeFilter: -1,
}, },
], ],
foundElems: 2, foundElems: 2,
original: "a b", original: "a b",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a b", userQuery: "a b",
error: null, error: null,
}, },
@ -65,7 +67,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: "a,b(c)", original: "a,b(c)",
returned: [], returned: [],
typeFilter: -1,
userQuery: "a,b(c)", userQuery: "a,b(c)",
error: "Unexpected `(`", error: "Unexpected `(`",
}, },
@ -77,6 +78,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "aaa", pathLast: "aaa",
generics: [], generics: [],
typeFilter: -1,
}, },
{ {
name: "a", name: "a",
@ -84,12 +86,12 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "a", pathLast: "a",
generics: [], generics: [],
typeFilter: -1,
}, },
], ],
foundElems: 2, foundElems: 2,
original: "aaa,a", original: "aaa,a",
returned: [], returned: [],
typeFilter: -1,
userQuery: "aaa,a", userQuery: "aaa,a",
error: null, error: null,
}, },
@ -98,7 +100,6 @@ const PARSED = [
foundElems: 0, foundElems: 0,
original: ",,,,", original: ",,,,",
returned: [], returned: [],
typeFilter: -1,
userQuery: ",,,,", userQuery: ",,,,",
error: null, error: null,
}, },
@@ -107,17 +108,15 @@ const PARSED = [
         foundElems: 0,
         original: 'mod :',
         returned: [],
-        typeFilter: 0,
         userQuery: 'mod :',
-        error: null,
+        error: "Unexpected `:` (expected path after type filter)",
     },
     {
         elems: [],
         foundElems: 0,
         original: 'mod\t:',
         returned: [],
-        typeFilter: 0,
         userQuery: 'mod\t:',
-        error: null,
+        error: "Unexpected `:` (expected path after type filter)",
     },
 ];

View file

@ -5,6 +5,8 @@ const QUERY = [
'Aaaaaaa -> bool', 'Aaaaaaa -> bool',
'Aaaaaaa -> usize', 'Aaaaaaa -> usize',
'Read -> u64', 'Read -> u64',
'trait:Read -> u64',
'struct:Read -> u64',
'bool -> u64', 'bool -> u64',
'Ddddddd -> u64', 'Ddddddd -> u64',
'-> Ddddddd' '-> Ddddddd'
@ -36,6 +38,17 @@ const EXPECTED = [
{ 'path': 'generics_impl::Ddddddd', 'name': 'ggggggg' }, { 'path': 'generics_impl::Ddddddd', 'name': 'ggggggg' },
], ],
}, },
{
// trait:Read -> u64
'others': [
{ 'path': 'generics_impl::Ddddddd', 'name': 'eeeeeee' },
{ 'path': 'generics_impl::Ddddddd', 'name': 'ggggggg' },
],
},
{
// struct:Read -> u64
'others': [],
},
{ {
// bool -> u64 // bool -> u64
'others': [ 'others': [
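The two new cases added above hinge on the item kind behind the name: `trait:Read -> u64` still matches the `Ddddddd` methods because `Read` is a trait, while `struct:Read -> u64` matches nothing. A hypothetical item of the shape such a query finds (the signature is assumed for illustration; only the paths and names come from the expected results above):

    // Hypothetical fixture shape -- not the actual rustdoc-js test source.
    use std::io::Read;

    pub struct Ddddddd;

    impl Ddddddd {
        // Accepts any `Read` implementor and returns `u64`, so it is found by
        // `Read -> u64` and `trait:Read -> u64`, but not by `struct:Read -> u64`.
        pub fn eeeeeee(&self, _source: impl Read) -> u64 {
            0
        }
    }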

View file

@ -2,6 +2,8 @@
const QUERY = [ const QUERY = [
'R<P>', 'R<P>',
'R<struct:P>',
'R<enum:P>',
'"P"', '"P"',
'P', 'P',
'ExtraCreditStructMulti<ExtraCreditInnerMulti, ExtraCreditInnerMulti>', 'ExtraCreditStructMulti<ExtraCreditInnerMulti, ExtraCreditInnerMulti>',
@ -20,6 +22,20 @@ const EXPECTED = [
{ 'path': 'generics', 'name': 'alpha' }, { 'path': 'generics', 'name': 'alpha' },
], ],
}, },
{
// R<struct:P>
'returned': [
{ 'path': 'generics', 'name': 'alef' },
],
'in_args': [
{ 'path': 'generics', 'name': 'alpha' },
],
},
{
// R<enum:P>
'returned': [],
'in_args': [],
},
{ {
// "P" // "P"
'others': [ 'others': [

View file

@ -3,6 +3,8 @@
const QUERY = [ const QUERY = [
"i32", "i32",
"str", "str",
"primitive:str",
"struct:str",
"TotoIsSomewhere", "TotoIsSomewhere",
]; ];
@ -17,6 +19,14 @@ const EXPECTED = [
{ 'path': 'primitive', 'name': 'foo' }, { 'path': 'primitive', 'name': 'foo' },
], ],
}, },
{
'returned': [
{ 'path': 'primitive', 'name': 'foo' },
],
},
{
'returned': [],
},
{ {
'others': [], 'others': [],
'in_args': [], 'in_args': [],

View file

@@ -1,8 +1,8 @@
 error: unconstrained generic constant
-  --> $DIR/cross_crate_predicate.rs:7:13
+  --> $DIR/cross_crate_predicate.rs:7:44
    |
 LL |     let _ = const_evaluatable_lib::test1::<T>();
-   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |                                            ^
    |
    = help: try adding a `where` bound using this expression: `where [(); std::mem::size_of::<T>() - 1]:`
 note: required by a bound in `test1`
@@ -12,10 +12,10 @@ LL | [u8; std::mem::size_of::<T>() - 1]: Sized,
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `test1`

 error: unconstrained generic constant
-  --> $DIR/cross_crate_predicate.rs:7:13
+  --> $DIR/cross_crate_predicate.rs:7:44
    |
 LL |     let _ = const_evaluatable_lib::test1::<T>();
-   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |                                            ^
    |
    = help: try adding a `where` bound using this expression: `where [(); std::mem::size_of::<T>() - 1]:`
 note: required by a bound in `test1`

View file

@@ -1,8 +1,8 @@
 error: the constant `N` is not of type `u8`
-  --> $DIR/type_mismatch.rs:2:5
+  --> $DIR/type_mismatch.rs:2:11
    |
 LL |     bar::<N>()
-   |     ^^^^^^^^
+   |           ^ expected `u8`, found `usize`
    |
 note: required by a bound in `bar`
   --> $DIR/type_mismatch.rs:6:8
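The narrowed span and the added note correspond to a const-generic type mismatch of roughly this shape (a hypothetical reconstruction; the actual type_mismatch.rs source is not part of this diff):

    // Hypothetical reconstruction -- deliberately does not compile; it only
    // illustrates the diagnostic shown above.
    fn foo<const N: usize>() {
        bar::<N>()
        //    ^ error: the constant `N` is not of type `u8`
        //      (`N` is a `usize` const parameter, but `bar` expects a `u8` one)
    }

    fn bar<const N: u8>() {}

    fn main() {}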

View file

@@ -2,7 +2,7 @@ error: the constant `N` is not of type `usize`
   --> $DIR/bad-const-wf-doesnt-specialize.rs:8:29
    |
 LL | impl<const N: i32> Copy for S<N> {}
-   |                             ^^^^
+   |                             ^^^^ expected `usize`, found `i32`
    |
 note: required by a bound in `S`
   --> $DIR/bad-const-wf-doesnt-specialize.rs:6:10

View file

@ -0,0 +1,40 @@
// check-pass
// compile-flags: -Ztrait-solver=next
trait Foo {
type Gat<'a>
where
Self: 'a;
fn bar(&self) -> Self::Gat<'_>;
}
enum Option<T> {
Some(T),
None,
}
impl<T> Option<T> {
fn as_ref(&self) -> Option<&T> {
match self {
Option::Some(t) => Option::Some(t),
Option::None => Option::None,
}
}
fn map<U>(self, f: impl FnOnce(T) -> U) -> Option<U> {
match self {
Option::Some(t) => Option::Some(f(t)),
Option::None => Option::None,
}
}
}
impl<T: Foo + 'static> Foo for Option<T> {
type Gat<'a> = Option<<T as Foo>::Gat<'a>> where Self: 'a;
fn bar(&self) -> Self::Gat<'_> {
self.as_ref().map(Foo::bar)
}
}
fn main() {}

View file

@@ -8,7 +8,7 @@ error: the constant `ASSUME_ALIGNMENT` is not of type `Assume`
   --> $DIR/issue-101739-1.rs:8:14
    |
 LL |         Dst: BikeshedIntrinsicFrom<Src, Context, ASSUME_ALIGNMENT>,
-   |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `Assume`, found `bool`
    |
 note: required by a bound in `BikeshedIntrinsicFrom`
   --> $SRC_DIR/core/src/mem/transmutability.rs:LL:COL