1
Fork 0

Auto merge of #69402 - GuillaumeGomez:extend-search, r=kinnison

Extend search

I realized that when looking for "struct:String" in the rustdoc search for example, the "in arguments" and "returned" tabs were always empty. After some investigation, I realized it was because we only provided the name, and not the type, making it impossible to pass the "type filtering" check.

To resolve this, I added the type alongside the name. Note for the future: we could improve this by only registering the path id and using the path dictionary directly. The only problem with that solution (which I already tested) is that it becomes complicated for types in other crates: it would force us to handle both the case with an id and the case with `(name, type)`. I found the current PR big enough that I didn't want to include that change directly. However, I think it is definitely worth making it work this way in the future.

About the two tests I added: they don't have much interest except checking that we actually have something returned in the search in the cases of a type filtering with and without literal search.

I also had to update the test script a bit to add the new locally global (haha) variable I created (`NO_TYPE_FILTER`). I added this variable to make the code easier to read than a bare "-1".

r? @kinnison

cc @ollie27
This commit is contained in:
bors 2020-03-19 16:07:59 +00:00
commit f4c675c476
10 changed files with 237 additions and 108 deletions

View file

@ -1078,6 +1078,26 @@ impl Clean<PolyTrait> for hir::PolyTraitRef<'_> {
} }
} }
impl Clean<TypeKind> for hir::def::DefKind {
fn clean(&self, _: &DocContext<'_>) -> TypeKind {
match *self {
hir::def::DefKind::Mod => TypeKind::Module,
hir::def::DefKind::Struct => TypeKind::Struct,
hir::def::DefKind::Union => TypeKind::Union,
hir::def::DefKind::Enum => TypeKind::Enum,
hir::def::DefKind::Trait => TypeKind::Trait,
hir::def::DefKind::TyAlias => TypeKind::Typedef,
hir::def::DefKind::ForeignTy => TypeKind::Foreign,
hir::def::DefKind::TraitAlias => TypeKind::TraitAlias,
hir::def::DefKind::Fn => TypeKind::Function,
hir::def::DefKind::Const => TypeKind::Const,
hir::def::DefKind::Static => TypeKind::Static,
hir::def::DefKind::Macro(_) => TypeKind::Macro,
_ => TypeKind::Foreign,
}
}
}
impl Clean<Item> for hir::TraitItem<'_> { impl Clean<Item> for hir::TraitItem<'_> {
fn clean(&self, cx: &DocContext<'_>) -> Item { fn clean(&self, cx: &DocContext<'_>) -> Item {
let inner = match self.kind { let inner = match self.kind {

View file

@ -836,8 +836,8 @@ pub struct Method {
pub decl: FnDecl, pub decl: FnDecl,
pub header: hir::FnHeader, pub header: hir::FnHeader,
pub defaultness: Option<hir::Defaultness>, pub defaultness: Option<hir::Defaultness>,
pub all_types: Vec<Type>, pub all_types: Vec<(Type, TypeKind)>,
pub ret_types: Vec<Type>, pub ret_types: Vec<(Type, TypeKind)>,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -845,8 +845,8 @@ pub struct TyMethod {
pub header: hir::FnHeader, pub header: hir::FnHeader,
pub decl: FnDecl, pub decl: FnDecl,
pub generics: Generics, pub generics: Generics,
pub all_types: Vec<Type>, pub all_types: Vec<(Type, TypeKind)>,
pub ret_types: Vec<Type>, pub ret_types: Vec<(Type, TypeKind)>,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -854,8 +854,8 @@ pub struct Function {
pub decl: FnDecl, pub decl: FnDecl,
pub generics: Generics, pub generics: Generics,
pub header: hir::FnHeader, pub header: hir::FnHeader,
pub all_types: Vec<Type>, pub all_types: Vec<(Type, TypeKind)>,
pub ret_types: Vec<Type>, pub ret_types: Vec<(Type, TypeKind)>,
} }
#[derive(Clone, PartialEq, Eq, Debug, Hash)] #[derive(Clone, PartialEq, Eq, Debug, Hash)]
@ -1042,7 +1042,7 @@ pub enum PrimitiveType {
Never, Never,
} }
#[derive(Clone, Copy, Debug)] #[derive(Clone, PartialEq, Eq, Hash, Copy, Debug)]
pub enum TypeKind { pub enum TypeKind {
Enum, Enum,
Function, Function,

View file

@ -184,7 +184,7 @@ pub fn get_real_types(
arg: &Type, arg: &Type,
cx: &DocContext<'_>, cx: &DocContext<'_>,
recurse: i32, recurse: i32,
) -> FxHashSet<Type> { ) -> FxHashSet<(Type, TypeKind)> {
let arg_s = arg.print().to_string(); let arg_s = arg.print().to_string();
let mut res = FxHashSet::default(); let mut res = FxHashSet::default();
if recurse >= 10 { if recurse >= 10 {
@ -209,7 +209,11 @@ pub fn get_real_types(
if !adds.is_empty() { if !adds.is_empty() {
res.extend(adds); res.extend(adds);
} else if !ty.is_full_generic() { } else if !ty.is_full_generic() {
res.insert(ty); if let Some(did) = ty.def_id() {
if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
res.insert((ty, kind));
}
}
} }
} }
} }
@ -225,13 +229,21 @@ pub fn get_real_types(
if !adds.is_empty() { if !adds.is_empty() {
res.extend(adds); res.extend(adds);
} else if !ty.is_full_generic() { } else if !ty.is_full_generic() {
res.insert(ty.clone()); if let Some(did) = ty.def_id() {
if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
res.insert((ty.clone(), kind));
}
}
} }
} }
} }
} }
} else { } else {
res.insert(arg.clone()); if let Some(did) = arg.def_id() {
if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
res.insert((arg.clone(), kind));
}
}
if let Some(gens) = arg.generics() { if let Some(gens) = arg.generics() {
for gen in gens.iter() { for gen in gens.iter() {
if gen.is_full_generic() { if gen.is_full_generic() {
@ -239,8 +251,10 @@ pub fn get_real_types(
if !adds.is_empty() { if !adds.is_empty() {
res.extend(adds); res.extend(adds);
} }
} else { } else if let Some(did) = gen.def_id() {
res.insert(gen.clone()); if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
res.insert((gen.clone(), kind));
}
} }
} }
} }
@ -256,7 +270,7 @@ pub fn get_all_types(
generics: &Generics, generics: &Generics,
decl: &FnDecl, decl: &FnDecl,
cx: &DocContext<'_>, cx: &DocContext<'_>,
) -> (Vec<Type>, Vec<Type>) { ) -> (Vec<(Type, TypeKind)>, Vec<(Type, TypeKind)>) {
let mut all_types = FxHashSet::default(); let mut all_types = FxHashSet::default();
for arg in decl.inputs.values.iter() { for arg in decl.inputs.values.iter() {
if arg.type_.is_self_type() { if arg.type_.is_self_type() {
@ -266,7 +280,11 @@ pub fn get_all_types(
if !args.is_empty() { if !args.is_empty() {
all_types.extend(args); all_types.extend(args);
} else { } else {
all_types.insert(arg.type_.clone()); if let Some(did) = arg.type_.def_id() {
if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
all_types.insert((arg.type_.clone(), kind));
}
}
} }
} }
@ -274,7 +292,11 @@ pub fn get_all_types(
FnRetTy::Return(ref return_type) => { FnRetTy::Return(ref return_type) => {
let mut ret = get_real_types(generics, &return_type, cx, 0); let mut ret = get_real_types(generics, &return_type, cx, 0);
if ret.is_empty() { if ret.is_empty() {
ret.insert(return_type.clone()); if let Some(did) = return_type.def_id() {
if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
ret.insert((return_type.clone(), kind));
}
}
} }
ret.into_iter().collect() ret.into_iter().collect()
} }

View file

@ -58,7 +58,7 @@ use rustc_span::symbol::{sym, Symbol};
use serde::ser::SerializeSeq; use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer}; use serde::{Serialize, Serializer};
use crate::clean::{self, AttributesExt, Deprecation, GetDefId, SelfTy}; use crate::clean::{self, AttributesExt, Deprecation, GetDefId, SelfTy, TypeKind};
use crate::config::{OutputFormat, RenderOptions}; use crate::config::{OutputFormat, RenderOptions};
use crate::docfs::{DocFS, ErrorStorage, PathError}; use crate::docfs::{DocFS, ErrorStorage, PathError};
use crate::doctree; use crate::doctree;
@ -302,19 +302,25 @@ impl Serialize for IndexItem {
/// A type used for the search index. /// A type used for the search index.
#[derive(Debug)] #[derive(Debug)]
struct Type { struct RenderType {
ty: Option<DefId>,
idx: Option<usize>,
name: Option<String>, name: Option<String>,
generics: Option<Vec<String>>, generics: Option<Vec<Generic>>,
} }
impl Serialize for Type { impl Serialize for RenderType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
if let Some(name) = &self.name { if let Some(name) = &self.name {
let mut seq = serializer.serialize_seq(None)?; let mut seq = serializer.serialize_seq(None)?;
seq.serialize_element(&name)?; if let Some(id) = self.idx {
seq.serialize_element(&id)?;
} else {
seq.serialize_element(&name)?;
}
if let Some(generics) = &self.generics { if let Some(generics) = &self.generics {
seq.serialize_element(&generics)?; seq.serialize_element(&generics)?;
} }
@ -325,11 +331,32 @@ impl Serialize for Type {
} }
} }
/// A generic type parameter as recorded in the search index.
#[derive(Debug)]
struct Generic {
    // Lowercased name of the generic type; serialized when no path id
    // is available.
    name: String,
    // `DefId` of the type, when it could be resolved; used later to
    // compute `idx`.
    defid: Option<DefId>,
    // Index into the search index's path table; serialized instead of
    // `name` when present, to keep the index compact.
    idx: Option<usize>,
}
impl Serialize for Generic {
    /// Serializes either the path-table index (preferred, more compact)
    /// or, when none was assigned, the bare lowercased name.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self.idx {
            Some(id) => serializer.serialize_some(&id),
            None => serializer.serialize_some(&self.name),
        }
    }
}
/// Full type of functions/methods in the search index. /// Full type of functions/methods in the search index.
#[derive(Debug)] #[derive(Debug)]
struct IndexItemFunctionType { struct IndexItemFunctionType {
inputs: Vec<Type>, inputs: Vec<TypeWithKind>,
output: Option<Vec<Type>>, output: Option<Vec<TypeWithKind>>,
} }
impl Serialize for IndexItemFunctionType { impl Serialize for IndexItemFunctionType {
@ -340,8 +367,8 @@ impl Serialize for IndexItemFunctionType {
// If we couldn't figure out a type, just write `null`. // If we couldn't figure out a type, just write `null`.
let mut iter = self.inputs.iter(); let mut iter = self.inputs.iter();
if match self.output { if match self.output {
Some(ref output) => iter.chain(output.iter()).any(|ref i| i.name.is_none()), Some(ref output) => iter.chain(output.iter()).any(|ref i| i.ty.name.is_none()),
None => iter.any(|ref i| i.name.is_none()), None => iter.any(|ref i| i.ty.name.is_none()),
} { } {
serializer.serialize_none() serializer.serialize_none()
} else { } else {
@ -359,6 +386,31 @@ impl Serialize for IndexItemFunctionType {
} }
} }
/// A search-index type paired with its item kind, so that type-filtered
/// queries (e.g. `struct:string`) can match "in arguments" / "returned"
/// entries as well.
#[derive(Debug)]
pub struct TypeWithKind {
    // Rendered form of the type (name or path id, plus generics).
    ty: RenderType,
    // Item kind (struct, enum, trait, ...) used by the search-side
    // type filter.
    kind: TypeKind,
}
impl From<(RenderType, TypeKind)> for TypeWithKind {
    /// Converts a `(type, kind)` pair, as produced by the clean pass,
    /// into the search-index representation.
    fn from((ty, kind): (RenderType, TypeKind)) -> TypeWithKind {
        TypeWithKind { ty, kind }
    }
}
impl Serialize for TypeWithKind {
    /// Serializes as a two-element sequence: the type's name followed by
    /// its `ItemType`, which the JS search code reads as `tmp[0]`/`tmp[1]`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let item_type: ItemType = self.kind.into();
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element(&self.ty.name)?;
        seq.serialize_element(&item_type)?;
        seq.end()
    }
}
thread_local!(static CACHE_KEY: RefCell<Arc<Cache>> = Default::default()); thread_local!(static CACHE_KEY: RefCell<Arc<Cache>> = Default::default());
thread_local!(pub static CURRENT_DEPTH: Cell<usize> = Cell::new(0)); thread_local!(pub static CURRENT_DEPTH: Cell<usize> = Cell::new(0));

View file

@ -12,7 +12,7 @@ use std::path::{Path, PathBuf};
use serde::Serialize; use serde::Serialize;
use super::{plain_summary_line, shorten, Impl, IndexItem, IndexItemFunctionType, ItemType}; use super::{plain_summary_line, shorten, Impl, IndexItem, IndexItemFunctionType, ItemType};
use super::{RenderInfo, Type}; use super::{Generic, RenderInfo, RenderType, TypeWithKind};
/// Indicates where an external crate can be found. /// Indicates where an external crate can be found.
pub enum ExternalLocation { pub enum ExternalLocation {
@ -588,17 +588,20 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
let mut lastpathid = 0usize; let mut lastpathid = 0usize;
for item in search_index { for item in search_index {
item.parent_idx = item.parent.map(|defid| { item.parent_idx = item.parent.and_then(|defid| {
if defid_to_pathid.contains_key(&defid) { if defid_to_pathid.contains_key(&defid) {
*defid_to_pathid.get(&defid).expect("no pathid") defid_to_pathid.get(&defid).map(|x| *x)
} else { } else {
let pathid = lastpathid; let pathid = lastpathid;
defid_to_pathid.insert(defid, pathid); defid_to_pathid.insert(defid, pathid);
lastpathid += 1; lastpathid += 1;
let &(ref fqp, short) = paths.get(&defid).unwrap(); if let Some(&(ref fqp, short)) = paths.get(&defid) {
crate_paths.push((short, fqp.last().unwrap().clone())); crate_paths.push((short, fqp.last().unwrap().clone()));
pathid Some(pathid)
} else {
None
}
} }
}); });
@ -647,20 +650,25 @@ fn get_index_search_type(item: &clean::Item) -> Option<IndexItemFunctionType> {
_ => return None, _ => return None,
}; };
let inputs = let inputs = all_types
all_types.iter().map(|arg| get_index_type(&arg)).filter(|a| a.name.is_some()).collect(); .iter()
.map(|(ty, kind)| TypeWithKind::from((get_index_type(&ty), *kind)))
.filter(|a| a.ty.name.is_some())
.collect();
let output = ret_types let output = ret_types
.iter() .iter()
.map(|arg| get_index_type(&arg)) .map(|(ty, kind)| TypeWithKind::from((get_index_type(&ty), *kind)))
.filter(|a| a.name.is_some()) .filter(|a| a.ty.name.is_some())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let output = if output.is_empty() { None } else { Some(output) }; let output = if output.is_empty() { None } else { Some(output) };
Some(IndexItemFunctionType { inputs, output }) Some(IndexItemFunctionType { inputs, output })
} }
fn get_index_type(clean_type: &clean::Type) -> Type { fn get_index_type(clean_type: &clean::Type) -> RenderType {
let t = Type { let t = RenderType {
ty: clean_type.def_id(),
idx: None,
name: get_index_type_name(clean_type, true).map(|s| s.to_ascii_lowercase()), name: get_index_type_name(clean_type, true).map(|s| s.to_ascii_lowercase()),
generics: get_generics(clean_type), generics: get_generics(clean_type),
}; };
@ -685,12 +693,17 @@ fn get_index_type_name(clean_type: &clean::Type, accept_generic: bool) -> Option
} }
} }
fn get_generics(clean_type: &clean::Type) -> Option<Vec<String>> { fn get_generics(clean_type: &clean::Type) -> Option<Vec<Generic>> {
clean_type.generics().and_then(|types| { clean_type.generics().and_then(|types| {
let r = types let r = types
.iter() .iter()
.filter_map(|t| get_index_type_name(t, false)) .filter_map(|t| {
.map(|s| s.to_ascii_lowercase()) if let Some(name) = get_index_type_name(t, false) {
Some(Generic { name: name.to_ascii_lowercase(), defid: t.def_id(), idx: None })
} else {
None
}
})
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if r.is_empty() { None } else { Some(r) } if r.is_empty() { None } else { Some(r) }
}) })

View file

@ -523,13 +523,14 @@ function getSearchElement() {
} }
function initSearch(rawSearchIndex) { function initSearch(rawSearchIndex) {
var currentResults, index, searchIndex;
var MAX_LEV_DISTANCE = 3; var MAX_LEV_DISTANCE = 3;
var MAX_RESULTS = 200; var MAX_RESULTS = 200;
var GENERICS_DATA = 1; var GENERICS_DATA = 1;
var NAME = 0; var NAME = 0;
var INPUTS_DATA = 0; var INPUTS_DATA = 0;
var OUTPUT_DATA = 1; var OUTPUT_DATA = 1;
var NO_TYPE_FILTER = -1;
var currentResults, index, searchIndex;
var params = getQueryStringParams(); var params = getQueryStringParams();
// Populate search bar with query string search term when provided, // Populate search bar with query string search term when provided,
@ -556,7 +557,7 @@ function getSearchElement() {
return i; return i;
} }
} }
return -1; return NO_TYPE_FILTER;
} }
var valLower = query.query.toLowerCase(), var valLower = query.query.toLowerCase(),
@ -719,6 +720,13 @@ function getSearchElement() {
}; };
} }
function getObjectFromId(id) {
    // Index entries reference other items either by numeric path id or
    // directly by name string; normalize both shapes to an object that
    // exposes a `name` property.
    if (typeof id !== "number") {
        return {'name': id};
    }
    return searchIndex[id];
}
function checkGenerics(obj, val) { function checkGenerics(obj, val) {
// The names match, but we need to be sure that all generics kinda // The names match, but we need to be sure that all generics kinda
// match as well. // match as well.
@ -735,8 +743,10 @@ function getSearchElement() {
for (var y = 0; y < vlength; ++y) { for (var y = 0; y < vlength; ++y) {
var lev = { pos: -1, lev: MAX_LEV_DISTANCE + 1}; var lev = { pos: -1, lev: MAX_LEV_DISTANCE + 1};
var elength = elems.length; var elength = elems.length;
var firstGeneric = getObjectFromId(val.generics[y]).name;
for (var x = 0; x < elength; ++x) { for (var x = 0; x < elength; ++x) {
var tmp_lev = levenshtein(elems[x], val.generics[y]); var tmp_lev = levenshtein(getObjectFromId(elems[x]).name,
firstGeneric);
if (tmp_lev < lev.lev) { if (tmp_lev < lev.lev) {
lev.lev = tmp_lev; lev.lev = tmp_lev;
lev.pos = x; lev.pos = x;
@ -771,8 +781,9 @@ function getSearchElement() {
for (var y = 0; allFound === true && y < val.generics.length; ++y) { for (var y = 0; allFound === true && y < val.generics.length; ++y) {
allFound = false; allFound = false;
var firstGeneric = getObjectFromId(val.generics[y]).name;
for (x = 0; allFound === false && x < elems.length; ++x) { for (x = 0; allFound === false && x < elems.length; ++x) {
allFound = elems[x] === val.generics[y]; allFound = getObjectFromId(elems[x]).name === firstGeneric;
} }
if (allFound === true) { if (allFound === true) {
elems.splice(x - 1, 1); elems.splice(x - 1, 1);
@ -829,16 +840,22 @@ function getSearchElement() {
return lev_distance + 1; return lev_distance + 1;
} }
function findArg(obj, val, literalSearch) { function findArg(obj, val, literalSearch, typeFilter) {
var lev_distance = MAX_LEV_DISTANCE + 1; var lev_distance = MAX_LEV_DISTANCE + 1;
if (obj && obj.type && obj.type[INPUTS_DATA] && if (obj && obj.type && obj.type[INPUTS_DATA] && obj.type[INPUTS_DATA].length > 0) {
obj.type[INPUTS_DATA].length > 0) {
var length = obj.type[INPUTS_DATA].length; var length = obj.type[INPUTS_DATA].length;
for (var i = 0; i < length; i++) { for (var i = 0; i < length; i++) {
var tmp = checkType(obj.type[INPUTS_DATA][i], val, literalSearch); var tmp = obj.type[INPUTS_DATA][i];
if (literalSearch === true && tmp === true) { if (typePassesFilter(typeFilter, tmp[1]) === false) {
return true; continue;
}
tmp = checkType(tmp, val, literalSearch);
if (literalSearch === true) {
if (tmp === true) {
return true;
}
continue;
} }
lev_distance = Math.min(tmp, lev_distance); lev_distance = Math.min(tmp, lev_distance);
if (lev_distance === 0) { if (lev_distance === 0) {
@ -849,20 +866,20 @@ function getSearchElement() {
return literalSearch === true ? false : lev_distance; return literalSearch === true ? false : lev_distance;
} }
function checkReturned(obj, val, literalSearch) { function checkReturned(obj, val, literalSearch, typeFilter) {
var lev_distance = MAX_LEV_DISTANCE + 1; var lev_distance = MAX_LEV_DISTANCE + 1;
if (obj && obj.type && obj.type.length > OUTPUT_DATA) { if (obj && obj.type && obj.type.length > OUTPUT_DATA) {
var ret = obj.type[OUTPUT_DATA]; var ret = obj.type[OUTPUT_DATA];
if (!obj.type[OUTPUT_DATA].length) { if (typeof ret[0] === "string") {
ret = [ret]; ret = [ret];
} }
for (var x = 0; x < ret.length; ++x) { for (var x = 0; x < ret.length; ++x) {
var r = ret[x]; var tmp = ret[x];
if (typeof r === "string") { if (typePassesFilter(typeFilter, tmp[1]) === false) {
r = [r]; continue;
} }
var tmp = checkType(r, val, literalSearch); tmp = checkType(tmp, val, literalSearch);
if (literalSearch === true) { if (literalSearch === true) {
if (tmp === true) { if (tmp === true) {
return true; return true;
@ -917,7 +934,7 @@ function getSearchElement() {
function typePassesFilter(filter, type) { function typePassesFilter(filter, type) {
// No filter // No filter
if (filter < 0) return true; if (filter <= NO_TYPE_FILTER) return true;
// Exact match // Exact match
if (filter === type) return true; if (filter === type) return true;
@ -926,11 +943,13 @@ function getSearchElement() {
var name = itemTypes[type]; var name = itemTypes[type];
switch (itemTypes[filter]) { switch (itemTypes[filter]) {
case "constant": case "constant":
return (name == "associatedconstant"); return name === "associatedconstant";
case "fn": case "fn":
return (name == "method" || name == "tymethod"); return name === "method" || name === "tymethod";
case "type": case "type":
return (name == "primitive" || name == "keyword"); return name === "primitive" || name === "associatedtype";
case "trait":
return name === "traitalias";
} }
// No match // No match
@ -959,42 +978,33 @@ function getSearchElement() {
if (filterCrates !== undefined && searchIndex[i].crate !== filterCrates) { if (filterCrates !== undefined && searchIndex[i].crate !== filterCrates) {
continue; continue;
} }
in_args = findArg(searchIndex[i], val, true); in_args = findArg(searchIndex[i], val, true, typeFilter);
returned = checkReturned(searchIndex[i], val, true); returned = checkReturned(searchIndex[i], val, true, typeFilter);
ty = searchIndex[i]; ty = searchIndex[i];
fullId = generateId(ty); fullId = generateId(ty);
if (searchWords[i] === val.name) { if (searchWords[i] === val.name
// filter type: ... queries && typePassesFilter(typeFilter, searchIndex[i].ty)
if (typePassesFilter(typeFilter, searchIndex[i].ty) && && results[fullId] === undefined) {
results[fullId] === undefined) results[fullId] = {
{ id: i,
results[fullId] = {id: i, index: -1}; index: -1,
} dontValidate: true,
} else if ((in_args === true || returned === true) && };
typePassesFilter(typeFilter, searchIndex[i].ty)) { }
if (in_args === true || returned === true) { if (in_args === true && results_in_args[fullId] === undefined) {
if (in_args === true) { results_in_args[fullId] = {
results_in_args[fullId] = { id: i,
id: i, index: -1,
index: -1, dontValidate: true,
dontValidate: true, };
}; }
} if (returned === true && results_returned[fullId] === undefined) {
if (returned === true) { results_returned[fullId] = {
results_returned[fullId] = { id: i,
id: i, index: -1,
index: -1, dontValidate: true,
dontValidate: true, };
};
}
} else {
results[fullId] = {
id: i,
index: -1,
dontValidate: true,
};
}
} }
} }
query.inputs = [val]; query.inputs = [val];
@ -1025,7 +1035,7 @@ function getSearchElement() {
// allow searching for void (no output) functions as well // allow searching for void (no output) functions as well
var typeOutput = type.length > OUTPUT_DATA ? type[OUTPUT_DATA].name : ""; var typeOutput = type.length > OUTPUT_DATA ? type[OUTPUT_DATA].name : "";
returned = checkReturned(ty, output, true); returned = checkReturned(ty, output, true, NO_TYPE_FILTER);
if (output.name === "*" || returned === true) { if (output.name === "*" || returned === true) {
in_args = false; in_args = false;
var is_module = false; var is_module = false;
@ -1126,16 +1136,8 @@ function getSearchElement() {
lev += 1; lev += 1;
} }
} }
if ((in_args = findArg(ty, valGenerics)) <= MAX_LEV_DISTANCE) { in_args = findArg(ty, valGenerics, false, typeFilter);
if (typePassesFilter(typeFilter, ty.ty) === false) { returned = checkReturned(ty, valGenerics, false, typeFilter);
in_args = MAX_LEV_DISTANCE + 1;
}
}
if ((returned = checkReturned(ty, valGenerics)) <= MAX_LEV_DISTANCE) {
if (typePassesFilter(typeFilter, ty.ty) === false) {
returned = MAX_LEV_DISTANCE + 1;
}
}
lev += lev_add; lev += lev_add;
if (lev > 0 && val.length > 3 && searchWords[j].indexOf(val) > -1) { if (lev > 0 && val.length > 3 && searchWords[j].indexOf(val) > -1) {

View file

@ -0,0 +1,10 @@
// Regression test for rustdoc search: a literal, type-filtered query
// (quoted name plus `struct:` filter) must still populate the
// "in arguments" and "returned" tabs.
const QUERY = 'struct:"string"';

const EXPECTED = {
    // A function taking a `String` argument must be found.
    'in_args': [
        { 'path': 'std::string::String', 'name': 'ne' },
    ],
    // A function returning a `String` must be found.
    'returned': [
        { 'path': 'std::string::String', 'name': 'add' },
    ],
};

View file

@ -0,0 +1,10 @@
// Regression test for rustdoc search: a non-literal, type-filtered query
// (`struct:` filter without quotes) must still populate the
// "in arguments" and "returned" tabs.
const QUERY = 'struct:string';

const EXPECTED = {
    // A function taking a `String` argument must be found.
    'in_args': [
        { 'path': 'std::string::String', 'name': 'ne' },
    ],
    // A function returning a `String` must be found.
    'returned': [
        { 'path': 'std::string::String', 'name': 'add' },
    ],
};

View file

@ -263,7 +263,7 @@ function main(argv) {
finalJS = ""; finalJS = "";
var arraysToLoad = ["itemTypes"]; var arraysToLoad = ["itemTypes"];
var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS", var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS", "NO_TYPE_FILTER",
"GENERICS_DATA", "NAME", "INPUTS_DATA", "OUTPUT_DATA", "GENERICS_DATA", "NAME", "INPUTS_DATA", "OUTPUT_DATA",
"TY_PRIMITIVE", "TY_KEYWORD", "TY_PRIMITIVE", "TY_KEYWORD",
"levenshtein_row2"]; "levenshtein_row2"];
@ -336,7 +336,7 @@ function main(argv) {
console.log("OK"); console.log("OK");
} }
}); });
return errors; return errors > 0 ? 1 : 0;
} }
process.exit(main(process.argv)); process.exit(main(process.argv));

View file

@ -231,7 +231,7 @@ function load_files(out_folder, crate) {
finalJS = ""; finalJS = "";
var arraysToLoad = ["itemTypes"]; var arraysToLoad = ["itemTypes"];
var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS", var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS", "NO_TYPE_FILTER",
"GENERICS_DATA", "NAME", "INPUTS_DATA", "OUTPUT_DATA", "GENERICS_DATA", "NAME", "INPUTS_DATA", "OUTPUT_DATA",
"TY_PRIMITIVE", "TY_KEYWORD", "TY_PRIMITIVE", "TY_KEYWORD",
"levenshtein_row2"]; "levenshtein_row2"];
@ -328,7 +328,7 @@ function main(argv) {
console.log("OK"); console.log("OK");
} }
} }
return errors; return errors > 0 ? 1 : 0;
} }
process.exit(main(process.argv)); process.exit(main(process.argv));