Simplify search-index serialization
commit 81f673d3bc
parent 810a514029
1 changed file with 100 additions and 118 deletions
@@ -52,7 +52,7 @@ use std::sync::Arc;
 use externalfiles::ExternalHtml;
-use serialize::json::as_json;
+use serialize::json::{ToJson, Json, as_json};
 use syntax::{abi, ast};
 use syntax::feature_gate::UnstableFeatures;
 use rustc::middle::cstore::LOCAL_CRATE;
@@ -290,22 +290,40 @@ struct IndexItem {
     path: String,
     desc: String,
     parent: Option<DefId>,
+    parent_idx: Option<usize>,
     search_type: Option<IndexItemFunctionType>,
 }
 
+impl ToJson for IndexItem {
+    fn to_json(&self) -> Json {
+        assert_eq!(self.parent.is_some(), self.parent_idx.is_some());
+
+        let mut data = Vec::with_capacity(6);
+        data.push((self.ty as usize).to_json());
+        data.push(self.name.to_json());
+        data.push(self.path.to_json());
+        data.push(self.desc.to_json());
+        data.push(self.parent_idx.to_json());
+        data.push(self.search_type.to_json());
+
+        Json::Array(data)
+    }
+}
+
 /// A type used for the search index.
 struct Type {
     name: Option<String>,
 }
 
-impl fmt::Display for Type {
-    /// Formats type as {name: $name}.
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        // Wrapping struct fmt should never call us when self.name is None,
-        // but just to be safe we write `null` in that case.
+impl ToJson for Type {
+    fn to_json(&self) -> Json {
         match self.name {
-            Some(ref n) => write!(f, "{{\"name\":\"{}\"}}", n),
-            None => write!(f, "null")
+            Some(ref name) => {
+                let mut data = BTreeMap::new();
+                data.insert("name".to_owned(), name.to_json());
+                Json::Object(data)
+            },
+            None => Json::Null
        }
    }
 }
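Note: with the `ToJson` impls above, each index entry now serializes to a positional JSON array instead of a hand-escaped string. A hypothetical entry (all names invented for illustration) would come out roughly as:

    [3,"foo","mycrate::somemodule","Does a foo.",0,{"inputs":[{"name":"usize"}],"output":null}]

where the six positions mirror the six `data.push` calls: item type, name, path, description, parent path index, and search type. `Option` fields fall out as `null` for free via `Option::to_json`.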
@@ -316,26 +334,17 @@ struct IndexItemFunctionType {
     output: Option<Type>
 }
 
-impl fmt::Display for IndexItemFunctionType {
-    /// Formats a full fn type as a JSON {inputs: [Type], outputs: Type/null}.
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+impl ToJson for IndexItemFunctionType {
+    fn to_json(&self) -> Json {
         // If we couldn't figure out a type, just write `null`.
-        if self.inputs.iter().any(|ref i| i.name.is_none()) ||
-           (self.output.is_some() && self.output.as_ref().unwrap().name.is_none()) {
-            return write!(f, "null")
+        if self.inputs.iter().chain(self.output.iter()).any(|ref i| i.name.is_none()) {
+            Json::Null
+        } else {
+            let mut data = BTreeMap::new();
+            data.insert("inputs".to_owned(), self.inputs.to_json());
+            data.insert("output".to_owned(), self.output.to_json());
+            Json::Object(data)
         }
-
-        let inputs: Vec<String> = self.inputs.iter().map(|ref t| {
-            format!("{}", t)
-        }).collect();
-        try!(write!(f, "{{\"inputs\":[{}],\"output\":", inputs.join(",")));
-
-        match self.output {
-            Some(ref t) => try!(write!(f, "{}", t)),
-            None => try!(write!(f, "null"))
-        };
-
-        Ok(try!(write!(f, "}}")))
     }
 }
 
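Note: the rewritten guard leans on `Option` being iterable. `self.output.iter()` yields the output type zero or one times, so a single `chain` plus `any` replaces the old two-clause condition. A minimal standalone sketch of that idiom, with invented stand-in values for `Type::name`:

    fn main() {
        let inputs: Vec<Option<&str>> = vec![Some("usize"), None];
        let output: Option<Option<&str>> = Some(Some("bool"));
        // Option::iter() yields its value zero or one times, so chaining it
        // onto the inputs lets one `any` check cover the output as well.
        let unknown = inputs.iter().chain(output.iter()).any(|n| n.is_none());
        assert!(unknown); // the None input marks the whole signature unknown
    }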
@@ -537,8 +546,9 @@ pub fn run(mut krate: clean::Crate,
 /// Build the search index from the collected metadata
 fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
     let mut nodeid_to_pathid = HashMap::new();
-    let mut pathid_to_nodeid = Vec::new();
-    {
+    let mut crate_items = Vec::with_capacity(cache.search_index.len());
+    let mut crate_paths = Vec::<Json>::new();
+
     let Cache { ref mut search_index,
                 ref orphan_methods,
                 ref mut paths, .. } = *cache;
@@ -556,6 +566,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
                 path: fqp[..fqp.len() - 1].join("::"),
                 desc: Escape(&shorter(item.doc_value())).to_string(),
                 parent: Some(did),
+                parent_idx: None,
                 search_type: get_index_search_type(&item, parent_basename),
             });
         },
@@ -565,76 +576,46 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
 
     // Reduce `NodeId` in paths into smaller sequential numbers,
     // and prune the paths that do not appear in the index.
-    for item in search_index.iter() {
-        match item.parent {
-            Some(nodeid) => {
-                if !nodeid_to_pathid.contains_key(&nodeid) {
-                    let pathid = pathid_to_nodeid.len();
+    let mut lastpath = String::new();
+    let mut lastpathid = 0usize;
+
+    for item in search_index {
+        item.parent_idx = item.parent.map(|nodeid| {
+            if nodeid_to_pathid.contains_key(&nodeid) {
+                *nodeid_to_pathid.get(&nodeid).unwrap()
+            } else {
+                let pathid = lastpathid;
                 nodeid_to_pathid.insert(nodeid, pathid);
-                    pathid_to_nodeid.push(nodeid);
+                lastpathid += 1;
+
+                let &(ref fqp, short) = paths.get(&nodeid).unwrap();
+                crate_paths.push(((short as usize), fqp.last().unwrap().clone()).to_json());
+                pathid
             }
+        });
+
+        // Omit the parent path if it is same to that of the prior item.
+        if lastpath == item.path {
+            item.path.clear();
+        } else {
+            lastpath = item.path.clone();
         }
-            None => {}
-        }
-    }
-    assert_eq!(nodeid_to_pathid.len(), pathid_to_nodeid.len());
+        crate_items.push(item.to_json());
     }
 
+    let crate_doc = krate.module.as_ref().map(|module| {
+        Escape(&shorter(module.doc_value())).to_string()
+    }).unwrap_or(String::new());
+
+    let mut crate_data = BTreeMap::new();
+    crate_data.insert("doc".to_owned(), Json::String(crate_doc));
+    crate_data.insert("items".to_owned(), Json::Array(crate_items));
+    crate_data.insert("paths".to_owned(), Json::Array(crate_paths));
+
     // Collect the index into a string
-    let mut w = io::Cursor::new(Vec::new());
-    let krate_doc = krate.module.as_ref().map(|module| {
-        Escape(&shorter(module.doc_value())).to_string()
-    }).unwrap_or("".to_owned());
-
-    write!(&mut w, r#"searchIndex[{}] = {{doc: {}, "items":["#,
+    format!("searchIndex[{}] = {};",
            as_json(&krate.name),
-           as_json(&krate_doc)).unwrap();
-
-    let mut lastpath = "".to_string();
-    for (i, item) in cache.search_index.iter().enumerate() {
-        // Omit the path if it is same to that of the prior item.
-        let path;
-        if lastpath == item.path {
-            path = "";
-        } else {
-            lastpath = item.path.to_string();
-            path = &item.path;
-        };
-
-        if i > 0 {
-            write!(&mut w, ",").unwrap();
-        }
-        write!(&mut w, "[{},{},{},{}",
-               item.ty as usize,
-               as_json(&item.name), as_json(&path), as_json(&item.desc)).unwrap();
-        match item.parent {
-            Some(nodeid) => {
-                let pathid = *nodeid_to_pathid.get(&nodeid).unwrap();
-                write!(&mut w, ",{}", pathid).unwrap();
-            }
-            None => write!(&mut w, ",null").unwrap()
-        }
-        match item.search_type {
-            Some(ref t) => write!(&mut w, ",{}", t).unwrap(),
-            None => write!(&mut w, ",null").unwrap()
-        }
-        write!(&mut w, "]").unwrap();
-    }
-
-    write!(&mut w, r#"],"paths":["#).unwrap();
-
-    for (i, &did) in pathid_to_nodeid.iter().enumerate() {
-        let &(ref fqp, short) = cache.paths.get(&did).unwrap();
-        if i > 0 {
-            write!(&mut w, ",").unwrap();
-        }
-        write!(&mut w, r#"[{},"{}"]"#,
-               short as usize, *fqp.last().unwrap()).unwrap();
-    }
-
-    write!(&mut w, "]}};").unwrap();
-
-    String::from_utf8(w.into_inner()).unwrap()
+           Json::Object(crate_data))
 }
 
 fn write_shared(cx: &Context,
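Note: `build_index` now assembles one `Json` value and renders it with a single `format!`, instead of streaming hand-escaped fragments through an `io::Cursor`. The emitted statement keeps the old `searchIndex[...] = ...;` form, but every key is now properly quoted (the old writer emitted a bare `doc:` key). With an invented crate name and contents, the output looks roughly like:

    searchIndex["mycrate"] = {"doc":"Crate docs.","items":[...],"paths":[[3,"SomeStruct"]]};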
@@ -1073,6 +1054,7 @@ impl DocFolder for Cache {
                 path: path.join("::").to_string(),
                 desc: Escape(&shorter(item.doc_value())).to_string(),
                 parent: parent,
+                parent_idx: None,
                 search_type: get_index_search_type(&item, parent_basename),
             });
         }