Make Decodable and Decoder infallible.

`Decoder` has two impls:
- opaque: this impl is already partly infallible, i.e. in some places it
  currently panics on failure (e.g. if the input is too short, or on a
  bad `Result` discriminant), and in other places it returns an error
  (e.g. on a bad `Option` discriminant); a sketch of this split follows
  the list. The number of places where either happens is surprisingly
  small, because the binary representation has very little redundancy
  and a lot of input reading can occur even on malformed data.
- json: this impl is fully fallible, but it's only used (a) for `.rlink`
  file production, where a `FIXME` comment suggests it should change to
  a binary format, and (b) in a few tests, in non-fundamental ways.
  Indeed, #85993 is open to remove it entirely.
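
As a rough illustration of that split (a self-contained sketch with invented
helper functions, not the actual opaque decoder), the pre-existing code mixes
an already-infallible path that panics on a bad discriminant with a fallible
path that reports an error:

```rust
// Toy code only: the function names and byte layout are assumptions for
// illustration, not rustc_serialize internals.

// Already-infallible style: a bad `Result` discriminant just panics.
fn read_result_discriminant(byte: u8) -> bool {
    match byte {
        0 => true,                                         // an Ok(..) payload follows
        1 => false,                                        // an Err(..) payload follows
        d => panic!("invalid Result discriminant: {}", d), // malformed input
    }
}

// Fallible style: a bad `Option` discriminant becomes a `Result::Err` that
// every caller then has to propagate with `?`.
fn read_option_discriminant(byte: u8) -> Result<bool, String> {
    match byte {
        0 => Ok(false), // None
        1 => Ok(true),  // a Some(..) payload follows
        d => Err(format!("invalid Option discriminant: {}", d)),
    }
}
```

The commit moves everything onto the first, panicking style.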

And the top-level places in the compiler that call into decoding just
abort on error anyway. So the fallibility is providing little value, and
getting rid of it leads to some non-trivial performance improvements.
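
Concretely, the signature change that runs through every hunk below looks like
this (a minimal sketch with stand-in traits; the real `rustc_serialize` traits
have many more methods):

```rust
// Stand-in traits for illustration, simplified from rustc_serialize.

// Before: every decoder carries an error type, and every decode returns Result.
trait FallibleDecoder {
    type Error;
    fn read_u32(&mut self) -> Result<u32, Self::Error>;
    fn error(&mut self, msg: &str) -> Self::Error;
}
trait FallibleDecodable<D: FallibleDecoder>: Sized {
    fn decode(d: &mut D) -> Result<Self, D::Error>;
}

// After: `Decoder::{Error, error}` is gone and `decode` returns the value
// directly; malformed input panics inside the decoder instead.
trait Decoder {
    fn read_u32(&mut self) -> u32;
}
trait Decodable<D: Decoder>: Sized {
    fn decode(d: &mut D) -> Self;
}

// A typical newtype impl (invented example type) shrinks from
// `read_u32().map(..)` plumbing to a plain expression.
struct ExampleIndex(u32);
impl<D: Decoder> Decodable<D> for ExampleIndex {
    fn decode(d: &mut D) -> Self {
        ExampleIndex(d.read_u32())
    }
}
```

This is the pattern repeated across the diff, e.g. for `AttrId`, `Svh`, and the
`newtype_index!` macro.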

Much of this commit is pretty boring and mechanical. Some notes about
a few interesting parts:
- The commit removes `Decoder::{Error,error}`.
- `InternIteratorElement::intern_with`: the impl for `T` now has the same
  optimization for small counts that the impl for `Result<T, E>` has,
  because it's now much hotter.
- Decodable impls for SmallVec, LinkedList, VecDeque now all use
  `collect`, which is nice; the one for `Vec` uses unsafe code, because
  that gave better perf on some benchmarks.
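
In the same stand-in style (traits repeated here so the snippet is
self-contained; the length prefix and `read_usize` method are assumptions for
illustration, not the actual wire format), the `collect`-based shape mentioned
above looks roughly like:

```rust
use std::collections::VecDeque;

// Stand-in traits, simplified from rustc_serialize.
trait Decoder {
    fn read_usize(&mut self) -> usize;
    fn read_u32(&mut self) -> u32;
}
trait Decodable<D: Decoder>: Sized {
    fn decode(d: &mut D) -> Self;
}

impl<D: Decoder> Decodable<D> for u32 {
    fn decode(d: &mut D) -> Self {
        d.read_u32()
    }
}

// With infallible element decodes there is no Result plumbing, so a sequence
// impl can collect straight into the container.
impl<D: Decoder, T: Decodable<D>> Decodable<D> for VecDeque<T> {
    fn decode(d: &mut D) -> Self {
        let len = d.read_usize();
        (0..len).map(|_| T::decode(d)).collect()
    }
}
```

The `Vec` impl mentioned above instead uses unsafe code for extra speed; the
sketch only shows the simpler `collect` form.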

Author: Nicholas Nethercote
Date:   2022-01-18 13:22:50 +11:00
Parent: 88600a6d7f
Commit: 416399dc10
39 changed files with 726 additions and 781 deletions

@@ -2418,8 +2418,9 @@ impl<S: Encoder> rustc_serialize::Encodable<S> for AttrId {
 }
 impl<D: Decoder> rustc_serialize::Decodable<D> for AttrId {
-    fn decode(d: &mut D) -> Result<AttrId, D::Error> {
-        d.read_unit().map(|_| crate::attr::mk_attr_id())
+    fn decode(d: &mut D) -> AttrId {
+        d.read_unit();
+        crate::attr::mk_attr_id()
     }
 }

@@ -115,8 +115,8 @@ impl<T> fmt::Pointer for P<T> {
 }
 impl<D: Decoder, T: 'static + Decodable<D>> Decodable<D> for P<T> {
-    fn decode(d: &mut D) -> Result<P<T>, D::Error> {
-        Decodable::decode(d).map(P)
+    fn decode(d: &mut D) -> P<T> {
+        P(Decodable::decode(d))
     }
 }
@@ -204,8 +204,8 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for P<[T]> {
 }
 impl<D: Decoder, T: Decodable<D>> Decodable<D> for P<[T]> {
-    fn decode(d: &mut D) -> Result<P<[T]>, D::Error> {
-        Ok(P::from_vec(Decodable::decode(d)?))
+    fn decode(d: &mut D) -> P<[T]> {
+        P::from_vec(Decodable::decode(d))
     }
 }

@@ -163,7 +163,7 @@ impl<S: Encoder> Encodable<S> for LazyTokenStream {
 }
 impl<D: Decoder> Decodable<D> for LazyTokenStream {
-    fn decode(_d: &mut D) -> Result<Self, D::Error> {
+    fn decode(_d: &mut D) -> Self {
         panic!("Attempted to decode LazyTokenStream");
     }
 }

@@ -149,10 +149,10 @@ impl<E: rustc_serialize::Encoder> Encodable<E> for Fingerprint {
 impl<D: rustc_serialize::Decoder> Decodable<D> for Fingerprint {
     #[inline]
-    fn decode(d: &mut D) -> Result<Self, D::Error> {
+    fn decode(d: &mut D) -> Self {
         let mut bytes = [0u8; 16];
-        d.read_raw_bytes_into(&mut bytes)?;
-        Ok(Fingerprint::from_le_bytes(bytes))
+        d.read_raw_bytes_into(&mut bytes);
+        Fingerprint::from_le_bytes(bytes)
     }
 }
@@ -195,8 +195,8 @@ impl<E: rustc_serialize::Encoder> Encodable<E> for PackedFingerprint {
 impl<D: rustc_serialize::Decoder> Decodable<D> for PackedFingerprint {
     #[inline]
-    fn decode(d: &mut D) -> Result<Self, D::Error> {
-        Fingerprint::decode(d).map(PackedFingerprint)
+    fn decode(d: &mut D) -> Self {
+        Self(Fingerprint::decode(d))
     }
 }

@@ -55,8 +55,8 @@ impl<S: Encoder> Encodable<S> for Svh {
 }
 impl<D: Decoder> Decodable<D> for Svh {
-    fn decode(d: &mut D) -> Result<Svh, D::Error> {
-        d.read_u64().map(u64::from_le).map(Svh::new)
+    fn decode(d: &mut D) -> Svh {
+        Svh::new(u64::from_le(d.read_u64()))
     }
 }

@@ -597,10 +597,7 @@ impl RustcDefaultCalls {
             let rlink_data = fs::read_to_string(file).unwrap_or_else(|err| {
                 sess.fatal(&format!("failed to read rlink file: {}", err));
             });
-            let codegen_results: CodegenResults =
-                json::decode(&rlink_data).unwrap_or_else(|err| {
-                    sess.fatal(&format!("failed to decode rlink: {}", err));
-                });
+            let codegen_results: CodegenResults = json::decode(&rlink_data);
             let result = compiler.codegen_backend().link(sess, codegen_results, &outputs);
             abort_on_err(result, sess);
         } else {

@@ -64,7 +64,7 @@ fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) {
         let bytes = output.lock().unwrap();
         let actual_output = str::from_utf8(&bytes).unwrap();
-        let actual_output: TestData = decode(actual_output).unwrap();
+        let actual_output: TestData = decode(actual_output);
         assert_eq!(expected_output, actual_output)
     })

@@ -99,8 +99,8 @@ impl Hash for ToolMetadata {
 // Doesn't really need to round-trip
 impl<D: Decoder> Decodable<D> for ToolMetadata {
-    fn decode(_d: &mut D) -> Result<Self, D::Error> {
-        Ok(ToolMetadata(None))
+    fn decode(_d: &mut D) -> Self {
+        ToolMetadata(None)
     }
 }

@@ -158,14 +158,7 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
         // Decode the list of work_products
         let mut work_product_decoder = Decoder::new(&work_products_data[..], start_pos);
         let work_products: Vec<SerializedWorkProduct> =
-            Decodable::decode(&mut work_product_decoder).unwrap_or_else(|e| {
-                let msg = format!(
-                    "Error decoding `work-products` from incremental \
-                    compilation session directory: {}",
-                    e
-                );
-                sess.fatal(&msg)
-            });
+            Decodable::decode(&mut work_product_decoder);
         for swp in work_products {
             let mut all_files_exist = true;
@@ -203,8 +196,7 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
             LoadResult::Error { message } => LoadResult::Error { message },
             LoadResult::Ok { data: (bytes, start_pos) } => {
                 let mut decoder = Decoder::new(&bytes, start_pos);
-                let prev_commandline_args_hash = u64::decode(&mut decoder)
-                    .expect("Error reading commandline arg hash from cached dep-graph");
+                let prev_commandline_args_hash = u64::decode(&mut decoder);
                 if prev_commandline_args_hash != expected_hash {
                     if report_incremental_info {
@@ -220,8 +212,7 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
                     return LoadResult::DataOutOfDate;
                 }
-                let dep_graph = SerializedDepGraph::decode(&mut decoder)
-                    .expect("Error reading cached dep-graph");
+                let dep_graph = SerializedDepGraph::decode(&mut decoder);
                 LoadResult::Ok { data: (dep_graph, prev_work_products) }
             }

@@ -395,8 +395,8 @@ macro_rules! newtype_index {
     (@serializable $type:ident) => (
         impl<D: ::rustc_serialize::Decoder> ::rustc_serialize::Decodable<D> for $type {
-            fn decode(d: &mut D) -> Result<Self, D::Error> {
-                d.read_u32().map(Self::from_u32)
+            fn decode(d: &mut D) -> Self {
+                Self::from_u32(d.read_u32())
             }
         }
         impl<E: ::rustc_serialize::Encoder> ::rustc_serialize::Encodable<E> for $type {
@@ -527,8 +527,8 @@ impl<S: Encoder, I: Idx, T: Encodable<S>> Encodable<S> for &IndexVec<I, T> {
 }
 impl<D: Decoder, I: Idx, T: Decodable<D>> Decodable<D> for IndexVec<I, T> {
-    fn decode(d: &mut D) -> Result<Self, D::Error> {
-        Decodable::decode(d).map(|v| IndexVec { raw: v, _marker: PhantomData })
+    fn decode(d: &mut D) -> Self {
+        IndexVec { raw: Decodable::decode(d), _marker: PhantomData }
     }
 }

@@ -47,7 +47,7 @@ fn decodable_body(
         quote! {
             ::rustc_serialize::Decoder::read_struct(
                 __decoder,
-                |__decoder| { ::std::result::Result::Ok(#construct) },
+                |__decoder| { #construct },
             )
         }
     }
@@ -57,7 +57,7 @@ fn decodable_body(
         .enumerate()
         .map(|(idx, vi)| {
             let construct = vi.construct(|field, index| decode_field(field, index, false));
-            quote! { #idx => { ::std::result::Result::Ok(#construct) } }
+            quote! { #idx => { #construct } }
         })
         .collect();
     let names: TokenStream = variants
@@ -82,8 +82,7 @@ fn decodable_body(
             |__decoder, __variant_idx| {
                 match __variant_idx {
                     #match_inner
-                    _ => return ::std::result::Result::Err(
-                        ::rustc_serialize::Decoder::error(__decoder, #message)),
+                    _ => panic!(#message),
                 }
             })
     }
@@ -95,9 +94,7 @@ fn decodable_body(
     s.bound_impl(
         quote!(::rustc_serialize::Decodable<#decoder_ty>),
         quote! {
-            fn decode(
-                __decoder: &mut #decoder_ty,
-            ) -> ::std::result::Result<Self, <#decoder_ty as ::rustc_serialize::Decoder>::Error> {
+            fn decode(__decoder: &mut #decoder_ty) -> Self {
                 #decode_body
             }
         },
@@ -127,12 +124,7 @@ fn decode_field(field: &syn::Field, index: usize, is_struct: bool) -> proc_macro
            #__decoder, #opt_field_name #decode_inner_method)
     };
-    quote! {
-        match #decode_call {
-            ::std::result::Result::Ok(__res) => __res,
-            ::std::result::Result::Err(__err) => return ::std::result::Result::Err(__err),
-        }
-    }
+    quote! { #decode_call }
 }
 pub fn type_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {

@ -263,7 +263,7 @@ impl<'a, 'tcx, T: Decodable<DecodeContext<'a, 'tcx>>> Lazy<T> {
fn decode<M: Metadata<'a, 'tcx>>(self, metadata: M) -> T { fn decode<M: Metadata<'a, 'tcx>>(self, metadata: M) -> T {
let mut dcx = metadata.decoder(self.position.get()); let mut dcx = metadata.decoder(self.position.get());
dcx.lazy_state = LazyState::NodeStart(self.position); dcx.lazy_state = LazyState::NodeStart(self.position);
T::decode(&mut dcx).unwrap() T::decode(&mut dcx)
} }
} }
@ -274,7 +274,7 @@ impl<'a: 'x, 'tcx: 'x, 'x, T: Decodable<DecodeContext<'a, 'tcx>>> Lazy<[T]> {
) -> impl ExactSizeIterator<Item = T> + Captures<'a> + Captures<'tcx> + 'x { ) -> impl ExactSizeIterator<Item = T> + Captures<'a> + Captures<'tcx> + 'x {
let mut dcx = metadata.decoder(self.position.get()); let mut dcx = metadata.decoder(self.position.get());
dcx.lazy_state = LazyState::NodeStart(self.position); dcx.lazy_state = LazyState::NodeStart(self.position);
(0..self.meta).map(move |_| T::decode(&mut dcx).unwrap()) (0..self.meta).map(move |_| T::decode(&mut dcx))
} }
} }
@ -300,11 +300,8 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> {
if cnum == LOCAL_CRATE { self.cdata().cnum } else { self.cdata().cnum_map[cnum] } if cnum == LOCAL_CRATE { self.cdata().cnum } else { self.cdata().cnum_map[cnum] }
} }
fn read_lazy_with_meta<T: ?Sized + LazyMeta>( fn read_lazy_with_meta<T: ?Sized + LazyMeta>(&mut self, meta: T::Meta) -> Lazy<T> {
&mut self, let distance = self.read_usize();
meta: T::Meta,
) -> Result<Lazy<T>, <Self as Decoder>::Error> {
let distance = self.read_usize()?;
let position = match self.lazy_state { let position = match self.lazy_state {
LazyState::NoNode => bug!("read_lazy_with_meta: outside of a metadata node"), LazyState::NoNode => bug!("read_lazy_with_meta: outside of a metadata node"),
LazyState::NodeStart(start) => { LazyState::NodeStart(start) => {
@ -315,7 +312,7 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> {
LazyState::Previous(last_pos) => last_pos.get() + distance, LazyState::Previous(last_pos) => last_pos.get() + distance,
}; };
self.lazy_state = LazyState::Previous(NonZeroUsize::new(position).unwrap()); self.lazy_state = LazyState::Previous(NonZeroUsize::new(position).unwrap());
Ok(Lazy::from_position_and_meta(NonZeroUsize::new(position).unwrap(), meta)) Lazy::from_position_and_meta(NonZeroUsize::new(position).unwrap(), meta)
} }
#[inline] #[inline]
@ -342,25 +339,21 @@ impl<'a, 'tcx> TyDecoder<'tcx> for DecodeContext<'a, 'tcx> {
self.opaque.position() self.opaque.position()
} }
fn cached_ty_for_shorthand<F>( fn cached_ty_for_shorthand<F>(&mut self, shorthand: usize, or_insert_with: F) -> Ty<'tcx>
&mut self,
shorthand: usize,
or_insert_with: F,
) -> Result<Ty<'tcx>, Self::Error>
where where
F: FnOnce(&mut Self) -> Result<Ty<'tcx>, Self::Error>, F: FnOnce(&mut Self) -> Ty<'tcx>,
{ {
let tcx = self.tcx(); let tcx = self.tcx();
let key = ty::CReaderCacheKey { cnum: Some(self.cdata().cnum), pos: shorthand }; let key = ty::CReaderCacheKey { cnum: Some(self.cdata().cnum), pos: shorthand };
if let Some(&ty) = tcx.ty_rcache.borrow().get(&key) { if let Some(&ty) = tcx.ty_rcache.borrow().get(&key) {
return Ok(ty); return ty;
} }
let ty = or_insert_with(self)?; let ty = or_insert_with(self);
tcx.ty_rcache.borrow_mut().insert(key, ty); tcx.ty_rcache.borrow_mut().insert(key, ty);
Ok(ty) ty
} }
fn with_position<F, R>(&mut self, pos: usize, f: F) -> R fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
@ -376,7 +369,7 @@ impl<'a, 'tcx> TyDecoder<'tcx> for DecodeContext<'a, 'tcx> {
r r
} }
fn decode_alloc_id(&mut self) -> Result<rustc_middle::mir::interpret::AllocId, Self::Error> { fn decode_alloc_id(&mut self) -> rustc_middle::mir::interpret::AllocId {
if let Some(alloc_decoding_session) = self.alloc_decoding_session { if let Some(alloc_decoding_session) = self.alloc_decoding_session {
alloc_decoding_session.decode_alloc_id(self) alloc_decoding_session.decode_alloc_id(self)
} else { } else {
@ -386,48 +379,48 @@ impl<'a, 'tcx> TyDecoder<'tcx> for DecodeContext<'a, 'tcx> {
} }
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for CrateNum { impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for CrateNum {
fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Result<CrateNum, String> { fn decode(d: &mut DecodeContext<'a, 'tcx>) -> CrateNum {
let cnum = CrateNum::from_u32(d.read_u32()?); let cnum = CrateNum::from_u32(d.read_u32());
Ok(d.map_encoded_cnum_to_current(cnum)) d.map_encoded_cnum_to_current(cnum)
} }
} }
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for DefIndex { impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for DefIndex {
fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Result<DefIndex, String> { fn decode(d: &mut DecodeContext<'a, 'tcx>) -> DefIndex {
Ok(DefIndex::from_u32(d.read_u32()?)) DefIndex::from_u32(d.read_u32())
} }
} }
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for ExpnIndex { impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for ExpnIndex {
fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Result<ExpnIndex, String> { fn decode(d: &mut DecodeContext<'a, 'tcx>) -> ExpnIndex {
Ok(ExpnIndex::from_u32(d.read_u32()?)) ExpnIndex::from_u32(d.read_u32())
} }
} }
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for SyntaxContext { impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for SyntaxContext {
fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Result<SyntaxContext, String> { fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> SyntaxContext {
let cdata = decoder.cdata(); let cdata = decoder.cdata();
let sess = decoder.sess.unwrap(); let sess = decoder.sess.unwrap();
let cname = cdata.root.name; let cname = cdata.root.name;
rustc_span::hygiene::decode_syntax_context(decoder, &cdata.hygiene_context, |_, id| { rustc_span::hygiene::decode_syntax_context(decoder, &cdata.hygiene_context, |_, id| {
debug!("SpecializedDecoder<SyntaxContext>: decoding {}", id); debug!("SpecializedDecoder<SyntaxContext>: decoding {}", id);
Ok(cdata cdata
.root .root
.syntax_contexts .syntax_contexts
.get(cdata, id) .get(cdata, id)
.unwrap_or_else(|| panic!("Missing SyntaxContext {:?} for crate {:?}", id, cname)) .unwrap_or_else(|| panic!("Missing SyntaxContext {:?} for crate {:?}", id, cname))
.decode((cdata, sess))) .decode((cdata, sess))
}) })
} }
} }
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for ExpnId { impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for ExpnId {
fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Result<ExpnId, String> { fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> ExpnId {
let local_cdata = decoder.cdata(); let local_cdata = decoder.cdata();
let sess = decoder.sess.unwrap(); let sess = decoder.sess.unwrap();
let cnum = CrateNum::decode(decoder)?; let cnum = CrateNum::decode(decoder);
let index = u32::decode(decoder)?; let index = u32::decode(decoder);
let expn_id = rustc_span::hygiene::decode_expn_id(cnum, index, |expn_id| { let expn_id = rustc_span::hygiene::decode_expn_id(cnum, index, |expn_id| {
let ExpnId { krate: cnum, local_id: index } = expn_id; let ExpnId { krate: cnum, local_id: index } = expn_id;
@ -453,23 +446,23 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for ExpnId {
.decode((crate_data, sess)); .decode((crate_data, sess));
(expn_data, expn_hash) (expn_data, expn_hash)
}); });
Ok(expn_id) expn_id
} }
} }
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Span { impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Span {
fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Result<Span, String> { fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Span {
let ctxt = SyntaxContext::decode(decoder)?; let ctxt = SyntaxContext::decode(decoder);
let tag = u8::decode(decoder)?; let tag = u8::decode(decoder);
if tag == TAG_PARTIAL_SPAN { if tag == TAG_PARTIAL_SPAN {
return Ok(DUMMY_SP.with_ctxt(ctxt)); return DUMMY_SP.with_ctxt(ctxt);
} }
debug_assert!(tag == TAG_VALID_SPAN_LOCAL || tag == TAG_VALID_SPAN_FOREIGN); debug_assert!(tag == TAG_VALID_SPAN_LOCAL || tag == TAG_VALID_SPAN_FOREIGN);
let lo = BytePos::decode(decoder)?; let lo = BytePos::decode(decoder);
let len = BytePos::decode(decoder)?; let len = BytePos::decode(decoder);
let hi = lo + len; let hi = lo + len;
let Some(sess) = decoder.sess else { let Some(sess) = decoder.sess else {
@ -512,7 +505,7 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Span {
if decoder.cdata().root.is_proc_macro_crate() { if decoder.cdata().root.is_proc_macro_crate() {
// Decode `CrateNum` as u32 - using `CrateNum::decode` will ICE // Decode `CrateNum` as u32 - using `CrateNum::decode` will ICE
// since we don't have `cnum_map` populated. // since we don't have `cnum_map` populated.
let cnum = u32::decode(decoder)?; let cnum = u32::decode(decoder);
panic!( panic!(
"Decoding of crate {:?} tried to access proc-macro dep {:?}", "Decoding of crate {:?} tried to access proc-macro dep {:?}",
decoder.cdata().root.name, decoder.cdata().root.name,
@ -520,7 +513,7 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Span {
); );
} }
// tag is TAG_VALID_SPAN_FOREIGN, checked by `debug_assert` above // tag is TAG_VALID_SPAN_FOREIGN, checked by `debug_assert` above
let cnum = CrateNum::decode(decoder)?; let cnum = CrateNum::decode(decoder);
debug!( debug!(
"SpecializedDecoder<Span>::specialized_decode: loading source files from cnum {:?}", "SpecializedDecoder<Span>::specialized_decode: loading source files from cnum {:?}",
cnum cnum
@ -582,18 +575,18 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Span {
(hi + source_file.translated_source_file.start_pos) - source_file.original_start_pos; (hi + source_file.translated_source_file.start_pos) - source_file.original_start_pos;
// Do not try to decode parent for foreign spans. // Do not try to decode parent for foreign spans.
Ok(Span::new(lo, hi, ctxt, None)) Span::new(lo, hi, ctxt, None)
} }
} }
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for &'tcx [thir::abstract_const::Node<'tcx>] { impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for &'tcx [thir::abstract_const::Node<'tcx>] {
fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Self {
ty::codec::RefDecodable::decode(d) ty::codec::RefDecodable::decode(d)
} }
} }
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] { impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] {
fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Self {
ty::codec::RefDecodable::decode(d) ty::codec::RefDecodable::decode(d)
} }
} }
@ -601,7 +594,7 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx
impl<'a, 'tcx, T: Decodable<DecodeContext<'a, 'tcx>>> Decodable<DecodeContext<'a, 'tcx>> impl<'a, 'tcx, T: Decodable<DecodeContext<'a, 'tcx>>> Decodable<DecodeContext<'a, 'tcx>>
for Lazy<T> for Lazy<T>
{ {
fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Result<Self, String> { fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Self {
decoder.read_lazy_with_meta(()) decoder.read_lazy_with_meta(())
} }
} }
@ -609,9 +602,9 @@ impl<'a, 'tcx, T: Decodable<DecodeContext<'a, 'tcx>>> Decodable<DecodeContext<'a
impl<'a, 'tcx, T: Decodable<DecodeContext<'a, 'tcx>>> Decodable<DecodeContext<'a, 'tcx>> impl<'a, 'tcx, T: Decodable<DecodeContext<'a, 'tcx>>> Decodable<DecodeContext<'a, 'tcx>>
for Lazy<[T]> for Lazy<[T]>
{ {
fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Result<Self, String> { fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Self {
let len = decoder.read_usize()?; let len = decoder.read_usize();
if len == 0 { Ok(Lazy::empty()) } else { decoder.read_lazy_with_meta(len) } if len == 0 { Lazy::empty() } else { decoder.read_lazy_with_meta(len) }
} }
} }
@ -620,8 +613,8 @@ impl<'a, 'tcx, I: Idx, T: Decodable<DecodeContext<'a, 'tcx>>> Decodable<DecodeCo
where where
Option<T>: FixedSizeEncoding, Option<T>: FixedSizeEncoding,
{ {
fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Result<Self, String> { fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Self {
let len = decoder.read_usize()?; let len = decoder.read_usize();
decoder.read_lazy_with_meta(len) decoder.read_lazy_with_meta(len)
} }
} }

@@ -39,11 +39,11 @@ impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for DefPathHashMapRef<'tcx> {
 }
 impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for DefPathHashMapRef<'static> {
-    fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Result<DefPathHashMapRef<'static>, String> {
+    fn decode(d: &mut DecodeContext<'a, 'tcx>) -> DefPathHashMapRef<'static> {
         // Import TyDecoder so we can access the DecodeContext::position() method
         use crate::rustc_middle::ty::codec::TyDecoder;
-        let len = d.read_usize()?;
+        let len = d.read_usize();
         let pos = d.position();
         let o = OwningRef::new(d.blob().clone()).map(|x| &x[pos..pos + len]);
@@ -52,7 +52,9 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for DefPathHashMapRef<'static>
         // the method. We use read_raw_bytes() for that.
         let _ = d.read_raw_bytes(len);
-        let inner = odht::HashTable::from_raw_bytes(o).map_err(|e| format!("{}", e))?;
-        Ok(DefPathHashMapRef::OwnedFromMetadata(inner))
+        let inner = odht::HashTable::from_raw_bytes(o).unwrap_or_else(|e| {
+            panic!("decode error: {}", e);
+        });
+        DefPathHashMapRef::OwnedFromMetadata(inner)
     }
 }

@@ -45,8 +45,9 @@ impl<S: serialize::Encoder> serialize::Encodable<S> for GraphIsCyclicCache {
 impl<D: serialize::Decoder> serialize::Decodable<D> for GraphIsCyclicCache {
     #[inline]
-    fn decode(d: &mut D) -> Result<Self, D::Error> {
-        serialize::Decodable::decode(d).map(|_v: ()| Self::new())
+    fn decode(d: &mut D) -> Self {
+        let () = serialize::Decodable::decode(d);
+        Self::new()
     }
 }

@ -273,20 +273,20 @@ pub struct AllocDecodingSession<'s> {
impl<'s> AllocDecodingSession<'s> { impl<'s> AllocDecodingSession<'s> {
/// Decodes an `AllocId` in a thread-safe way. /// Decodes an `AllocId` in a thread-safe way.
pub fn decode_alloc_id<'tcx, D>(&self, decoder: &mut D) -> Result<AllocId, D::Error> pub fn decode_alloc_id<'tcx, D>(&self, decoder: &mut D) -> AllocId
where where
D: TyDecoder<'tcx>, D: TyDecoder<'tcx>,
{ {
// Read the index of the allocation. // Read the index of the allocation.
let idx = usize::try_from(decoder.read_u32()?).unwrap(); let idx = usize::try_from(decoder.read_u32()).unwrap();
let pos = usize::try_from(self.state.data_offsets[idx]).unwrap(); let pos = usize::try_from(self.state.data_offsets[idx]).unwrap();
// Decode the `AllocDiscriminant` now so that we know if we have to reserve an // Decode the `AllocDiscriminant` now so that we know if we have to reserve an
// `AllocId`. // `AllocId`.
let (alloc_kind, pos) = decoder.with_position(pos, |decoder| { let (alloc_kind, pos) = decoder.with_position(pos, |decoder| {
let alloc_kind = AllocDiscriminant::decode(decoder)?; let alloc_kind = AllocDiscriminant::decode(decoder);
Ok((alloc_kind, decoder.position())) (alloc_kind, decoder.position())
})?; });
// Check the decoding state to see if it's already decoded or if we should // Check the decoding state to see if it's already decoded or if we should
// decode it here. // decode it here.
@ -295,7 +295,7 @@ impl<'s> AllocDecodingSession<'s> {
match *entry { match *entry {
State::Done(alloc_id) => { State::Done(alloc_id) => {
return Ok(alloc_id); return alloc_id;
} }
ref mut entry @ State::Empty => { ref mut entry @ State::Empty => {
// We are allowed to decode. // We are allowed to decode.
@ -329,7 +329,7 @@ impl<'s> AllocDecodingSession<'s> {
State::InProgress(ref mut sessions, alloc_id) => { State::InProgress(ref mut sessions, alloc_id) => {
if sessions.contains(&self.session_id) { if sessions.contains(&self.session_id) {
// Don't recurse. // Don't recurse.
return Ok(alloc_id); return alloc_id;
} else { } else {
// Start decoding concurrently. // Start decoding concurrently.
sessions.insert(self.session_id); sessions.insert(self.session_id);
@ -343,37 +343,37 @@ impl<'s> AllocDecodingSession<'s> {
let alloc_id = decoder.with_position(pos, |decoder| { let alloc_id = decoder.with_position(pos, |decoder| {
match alloc_kind { match alloc_kind {
AllocDiscriminant::Alloc => { AllocDiscriminant::Alloc => {
let alloc = <&'tcx Allocation as Decodable<_>>::decode(decoder)?; let alloc = <&'tcx Allocation as Decodable<_>>::decode(decoder);
// We already have a reserved `AllocId`. // We already have a reserved `AllocId`.
let alloc_id = alloc_id.unwrap(); let alloc_id = alloc_id.unwrap();
trace!("decoded alloc {:?}: {:#?}", alloc_id, alloc); trace!("decoded alloc {:?}: {:#?}", alloc_id, alloc);
decoder.tcx().set_alloc_id_same_memory(alloc_id, alloc); decoder.tcx().set_alloc_id_same_memory(alloc_id, alloc);
Ok(alloc_id) alloc_id
} }
AllocDiscriminant::Fn => { AllocDiscriminant::Fn => {
assert!(alloc_id.is_none()); assert!(alloc_id.is_none());
trace!("creating fn alloc ID"); trace!("creating fn alloc ID");
let instance = ty::Instance::decode(decoder)?; let instance = ty::Instance::decode(decoder);
trace!("decoded fn alloc instance: {:?}", instance); trace!("decoded fn alloc instance: {:?}", instance);
let alloc_id = decoder.tcx().create_fn_alloc(instance); let alloc_id = decoder.tcx().create_fn_alloc(instance);
Ok(alloc_id) alloc_id
} }
AllocDiscriminant::Static => { AllocDiscriminant::Static => {
assert!(alloc_id.is_none()); assert!(alloc_id.is_none());
trace!("creating extern static alloc ID"); trace!("creating extern static alloc ID");
let did = <DefId as Decodable<D>>::decode(decoder)?; let did = <DefId as Decodable<D>>::decode(decoder);
trace!("decoded static def-ID: {:?}", did); trace!("decoded static def-ID: {:?}", did);
let alloc_id = decoder.tcx().create_static_alloc(did); let alloc_id = decoder.tcx().create_static_alloc(did);
Ok(alloc_id) alloc_id
} }
} }
})?; });
self.state.decoding_state[idx].with_lock(|entry| { self.state.decoding_state[idx].with_lock(|entry| {
*entry = State::Done(alloc_id); *entry = State::Done(alloc_id);
}); });
Ok(alloc_id) alloc_id
} }
} }

@@ -619,20 +619,20 @@ impl<'tcx, E: TyEncoder<'tcx>, T: Encodable<E>> Encodable<E> for ClearCrossCrate
 }
 impl<'tcx, D: TyDecoder<'tcx>, T: Decodable<D>> Decodable<D> for ClearCrossCrate<T> {
     #[inline]
-    fn decode(d: &mut D) -> Result<ClearCrossCrate<T>, D::Error> {
+    fn decode(d: &mut D) -> ClearCrossCrate<T> {
         if D::CLEAR_CROSS_CRATE {
-            return Ok(ClearCrossCrate::Clear);
+            return ClearCrossCrate::Clear;
         }
-        let discr = u8::decode(d)?;
+        let discr = u8::decode(d);
         match discr {
-            TAG_CLEAR_CROSS_CRATE_CLEAR => Ok(ClearCrossCrate::Clear),
+            TAG_CLEAR_CROSS_CRATE_CLEAR => ClearCrossCrate::Clear,
             TAG_CLEAR_CROSS_CRATE_SET => {
-                let val = T::decode(d)?;
-                Ok(ClearCrossCrate::Set(val))
+                let val = T::decode(d);
+                ClearCrossCrate::Set(val)
             }
-            tag => Err(d.error(&format!("Invalid tag for ClearCrossCrate: {:?}", tag))),
+            tag => panic!("Invalid tag for ClearCrossCrate: {:?}", tag),
         }
     }
 }

@@ -57,14 +57,15 @@ impl PredecessorCache {
 impl<S: serialize::Encoder> serialize::Encodable<S> for PredecessorCache {
     #[inline]
     fn encode(&self, s: &mut S) -> Result<(), S::Error> {
-        serialize::Encodable::encode(&(), s)
+        s.emit_unit()
     }
 }
 impl<D: serialize::Decoder> serialize::Decodable<D> for PredecessorCache {
     #[inline]
-    fn decode(d: &mut D) -> Result<Self, D::Error> {
-        serialize::Decodable::decode(d).map(|_v: ()| Self::new())
+    fn decode(d: &mut D) -> Self {
+        let () = d.read_unit();
+        Self::new()
     }
 }

@ -14,7 +14,7 @@ use crate::mir::{
}; };
use crate::thir; use crate::thir;
use crate::ty::subst::SubstsRef; use crate::ty::subst::SubstsRef;
use crate::ty::{self, List, Ty, TyCtxt}; use crate::ty::{self, Ty, TyCtxt};
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use rustc_span::Span; use rustc_span::Span;
@ -71,7 +71,7 @@ pub trait TyEncoder<'tcx>: Encoder {
/// `Decodable` can still be implemented in cases where `Decodable` is required /// `Decodable` can still be implemented in cases where `Decodable` is required
/// by a trait bound. /// by a trait bound.
pub trait RefDecodable<'tcx, D: TyDecoder<'tcx>> { pub trait RefDecodable<'tcx, D: TyDecoder<'tcx>> {
fn decode(d: &mut D) -> Result<&'tcx Self, D::Error>; fn decode(d: &mut D) -> &'tcx Self;
} }
/// Encode the given value or a previously cached shorthand. /// Encode the given value or a previously cached shorthand.
@ -172,13 +172,9 @@ pub trait TyDecoder<'tcx>: Decoder {
fn position(&self) -> usize; fn position(&self) -> usize;
fn cached_ty_for_shorthand<F>( fn cached_ty_for_shorthand<F>(&mut self, shorthand: usize, or_insert_with: F) -> Ty<'tcx>
&mut self,
shorthand: usize,
or_insert_with: F,
) -> Result<Ty<'tcx>, Self::Error>
where where
F: FnOnce(&mut Self) -> Result<Ty<'tcx>, Self::Error>; F: FnOnce(&mut Self) -> Ty<'tcx>;
fn with_position<F, R>(&mut self, pos: usize, f: F) -> R fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
where where
@ -188,35 +184,35 @@ pub trait TyDecoder<'tcx>: Decoder {
(self.peek_byte() & (SHORTHAND_OFFSET as u8)) != 0 (self.peek_byte() & (SHORTHAND_OFFSET as u8)) != 0
} }
fn decode_alloc_id(&mut self) -> Result<AllocId, Self::Error>; fn decode_alloc_id(&mut self) -> AllocId;
} }
#[inline] #[inline]
fn decode_arena_allocable<'tcx, D, T: ArenaAllocatable<'tcx> + Decodable<D>>( fn decode_arena_allocable<'tcx, D, T: ArenaAllocatable<'tcx> + Decodable<D>>(
decoder: &mut D, decoder: &mut D,
) -> Result<&'tcx T, D::Error> ) -> &'tcx T
where where
D: TyDecoder<'tcx>, D: TyDecoder<'tcx>,
{ {
Ok(decoder.tcx().arena.alloc(Decodable::decode(decoder)?)) decoder.tcx().arena.alloc(Decodable::decode(decoder))
} }
#[inline] #[inline]
fn decode_arena_allocable_slice<'tcx, D, T: ArenaAllocatable<'tcx> + Decodable<D>>( fn decode_arena_allocable_slice<'tcx, D, T: ArenaAllocatable<'tcx> + Decodable<D>>(
decoder: &mut D, decoder: &mut D,
) -> Result<&'tcx [T], D::Error> ) -> &'tcx [T]
where where
D: TyDecoder<'tcx>, D: TyDecoder<'tcx>,
{ {
Ok(decoder.tcx().arena.alloc_from_iter(<Vec<T> as Decodable<D>>::decode(decoder)?)) decoder.tcx().arena.alloc_from_iter(<Vec<T> as Decodable<D>>::decode(decoder))
} }
impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for Ty<'tcx> { impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for Ty<'tcx> {
#[allow(rustc::usage_of_ty_tykind)] #[allow(rustc::usage_of_ty_tykind)]
fn decode(decoder: &mut D) -> Result<Ty<'tcx>, D::Error> { fn decode(decoder: &mut D) -> Ty<'tcx> {
// Handle shorthands first, if we have a usize > 0x80. // Handle shorthands first, if we have a usize > 0x80.
if decoder.positioned_at_shorthand() { if decoder.positioned_at_shorthand() {
let pos = decoder.read_usize()?; let pos = decoder.read_usize();
assert!(pos >= SHORTHAND_OFFSET); assert!(pos >= SHORTHAND_OFFSET);
let shorthand = pos - SHORTHAND_OFFSET; let shorthand = pos - SHORTHAND_OFFSET;
@ -225,87 +221,89 @@ impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for Ty<'tcx> {
}) })
} else { } else {
let tcx = decoder.tcx(); let tcx = decoder.tcx();
Ok(tcx.mk_ty(ty::TyKind::decode(decoder)?)) tcx.mk_ty(ty::TyKind::decode(decoder))
} }
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::Binder<'tcx, ty::PredicateKind<'tcx>> { impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::Binder<'tcx, ty::PredicateKind<'tcx>> {
fn decode(decoder: &mut D) -> Result<ty::Binder<'tcx, ty::PredicateKind<'tcx>>, D::Error> { fn decode(decoder: &mut D) -> ty::Binder<'tcx, ty::PredicateKind<'tcx>> {
let bound_vars = Decodable::decode(decoder)?; let bound_vars = Decodable::decode(decoder);
// Handle shorthands first, if we have a usize > 0x80. // Handle shorthands first, if we have a usize > 0x80.
Ok(ty::Binder::bind_with_vars( ty::Binder::bind_with_vars(
if decoder.positioned_at_shorthand() { if decoder.positioned_at_shorthand() {
let pos = decoder.read_usize()?; let pos = decoder.read_usize();
assert!(pos >= SHORTHAND_OFFSET); assert!(pos >= SHORTHAND_OFFSET);
let shorthand = pos - SHORTHAND_OFFSET; let shorthand = pos - SHORTHAND_OFFSET;
decoder.with_position(shorthand, ty::PredicateKind::decode)? decoder.with_position(shorthand, ty::PredicateKind::decode)
} else { } else {
ty::PredicateKind::decode(decoder)? ty::PredicateKind::decode(decoder)
}, },
bound_vars, bound_vars,
)) )
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::Predicate<'tcx> { impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::Predicate<'tcx> {
fn decode(decoder: &mut D) -> Result<ty::Predicate<'tcx>, D::Error> { fn decode(decoder: &mut D) -> ty::Predicate<'tcx> {
let predicate_kind = Decodable::decode(decoder)?; let predicate_kind = Decodable::decode(decoder);
let predicate = decoder.tcx().mk_predicate(predicate_kind); decoder.tcx().mk_predicate(predicate_kind)
Ok(predicate)
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for SubstsRef<'tcx> { impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for SubstsRef<'tcx> {
fn decode(decoder: &mut D) -> Result<Self, D::Error> { fn decode(decoder: &mut D) -> Self {
let len = decoder.read_usize()?; let len = decoder.read_usize();
let tcx = decoder.tcx(); let tcx = decoder.tcx();
tcx.mk_substs((0..len).map(|_| Decodable::decode(decoder))) tcx.mk_substs(
(0..len).map::<ty::subst::GenericArg<'tcx>, _>(|_| Decodable::decode(decoder)),
)
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for mir::Place<'tcx> { impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for mir::Place<'tcx> {
fn decode(decoder: &mut D) -> Result<Self, D::Error> { fn decode(decoder: &mut D) -> Self {
let local: mir::Local = Decodable::decode(decoder)?; let local: mir::Local = Decodable::decode(decoder);
let len = decoder.read_usize()?; let len = decoder.read_usize();
let projection: &'tcx List<mir::PlaceElem<'tcx>> = let projection = decoder.tcx().mk_place_elems(
decoder.tcx().mk_place_elems((0..len).map(|_| Decodable::decode(decoder)))?; (0..len).map::<mir::PlaceElem<'tcx>, _>(|_| Decodable::decode(decoder)),
Ok(mir::Place { local, projection }) );
mir::Place { local, projection }
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::Region<'tcx> { impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::Region<'tcx> {
fn decode(decoder: &mut D) -> Result<Self, D::Error> { fn decode(decoder: &mut D) -> Self {
Ok(decoder.tcx().mk_region(Decodable::decode(decoder)?)) decoder.tcx().mk_region(Decodable::decode(decoder))
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for CanonicalVarInfos<'tcx> { impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for CanonicalVarInfos<'tcx> {
fn decode(decoder: &mut D) -> Result<Self, D::Error> { fn decode(decoder: &mut D) -> Self {
let len = decoder.read_usize()?; let len = decoder.read_usize();
let interned: Result<Vec<CanonicalVarInfo<'tcx>>, _> = let interned: Vec<CanonicalVarInfo<'tcx>> =
(0..len).map(|_| Decodable::decode(decoder)).collect(); (0..len).map(|_| Decodable::decode(decoder)).collect();
Ok(decoder.tcx().intern_canonical_var_infos(interned?.as_slice())) decoder.tcx().intern_canonical_var_infos(interned.as_slice())
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for AllocId { impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for AllocId {
fn decode(decoder: &mut D) -> Result<Self, D::Error> { fn decode(decoder: &mut D) -> Self {
decoder.decode_alloc_id() decoder.decode_alloc_id()
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::SymbolName<'tcx> { impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::SymbolName<'tcx> {
fn decode(decoder: &mut D) -> Result<Self, D::Error> { fn decode(decoder: &mut D) -> Self {
Ok(ty::SymbolName::new(decoder.tcx(), &decoder.read_str()?)) ty::SymbolName::new(decoder.tcx(), &decoder.read_str())
} }
} }
macro_rules! impl_decodable_via_ref { macro_rules! impl_decodable_via_ref {
($($t:ty),+) => { ($($t:ty),+) => {
$(impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for $t { $(impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for $t {
fn decode(decoder: &mut D) -> Result<Self, D::Error> { fn decode(decoder: &mut D) -> Self {
RefDecodable::decode(decoder) RefDecodable::decode(decoder)
} }
})* })*
@ -313,77 +311,73 @@ macro_rules! impl_decodable_via_ref {
} }
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::List<Ty<'tcx>> { impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::List<Ty<'tcx>> {
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
let len = decoder.read_usize()?; let len = decoder.read_usize();
decoder.tcx().mk_type_list((0..len).map(|_| Decodable::decode(decoder))) decoder.tcx().mk_type_list((0..len).map::<Ty<'tcx>, _>(|_| Decodable::decode(decoder)))
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D>
for ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>> for ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>
{ {
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
let len = decoder.read_usize()?; let len = decoder.read_usize();
decoder.tcx().mk_poly_existential_predicates((0..len).map(|_| Decodable::decode(decoder))) decoder.tcx().mk_poly_existential_predicates(
(0..len).map::<ty::Binder<'tcx, _>, _>(|_| Decodable::decode(decoder)),
)
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::Const<'tcx> { impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::Const<'tcx> {
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
Ok(decoder.tcx().mk_const(Decodable::decode(decoder)?)) decoder.tcx().mk_const(Decodable::decode(decoder))
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [ty::ValTree<'tcx>] { impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [ty::ValTree<'tcx>] {
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
Ok(decoder.tcx().arena.alloc_from_iter( decoder.tcx().arena.alloc_from_iter(
(0..decoder.read_usize()?) (0..decoder.read_usize()).map(|_| Decodable::decode(decoder)).collect::<Vec<_>>(),
.map(|_| Decodable::decode(decoder)) )
.collect::<Result<Vec<_>, _>>()?,
))
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for Allocation { impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for Allocation {
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
Ok(decoder.tcx().intern_const_alloc(Decodable::decode(decoder)?)) decoder.tcx().intern_const_alloc(Decodable::decode(decoder))
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [(ty::Predicate<'tcx>, Span)] { impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [(ty::Predicate<'tcx>, Span)] {
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
Ok(decoder.tcx().arena.alloc_from_iter( decoder.tcx().arena.alloc_from_iter(
(0..decoder.read_usize()?) (0..decoder.read_usize()).map(|_| Decodable::decode(decoder)).collect::<Vec<_>>(),
.map(|_| Decodable::decode(decoder)) )
.collect::<Result<Vec<_>, _>>()?,
))
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [thir::abstract_const::Node<'tcx>] { impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [thir::abstract_const::Node<'tcx>] {
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
Ok(decoder.tcx().arena.alloc_from_iter( decoder.tcx().arena.alloc_from_iter(
(0..decoder.read_usize()?) (0..decoder.read_usize()).map(|_| Decodable::decode(decoder)).collect::<Vec<_>>(),
.map(|_| Decodable::decode(decoder)) )
.collect::<Result<Vec<_>, _>>()?,
))
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [thir::abstract_const::NodeId] { impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [thir::abstract_const::NodeId] {
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
Ok(decoder.tcx().arena.alloc_from_iter( decoder.tcx().arena.alloc_from_iter(
(0..decoder.read_usize()?) (0..decoder.read_usize()).map(|_| Decodable::decode(decoder)).collect::<Vec<_>>(),
.map(|_| Decodable::decode(decoder)) )
.collect::<Result<Vec<_>, _>>()?,
))
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::List<ty::BoundVariableKind> { impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::List<ty::BoundVariableKind> {
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
let len = decoder.read_usize()?; let len = decoder.read_usize();
decoder.tcx().mk_bound_variable_kinds((0..len).map(|_| Decodable::decode(decoder))) decoder.tcx().mk_bound_variable_kinds(
(0..len).map::<ty::BoundVariableKind, _>(|_| Decodable::decode(decoder)),
)
} }
} }
@ -405,7 +399,7 @@ macro_rules! __impl_decoder_methods {
($($name:ident -> $ty:ty;)*) => { ($($name:ident -> $ty:ty;)*) => {
$( $(
#[inline] #[inline]
fn $name(&mut self) -> Result<$ty, Self::Error> { fn $name(&mut self) -> $ty {
self.opaque.$name() self.opaque.$name()
} }
)* )*
@ -418,14 +412,14 @@ macro_rules! impl_arena_allocatable_decoder {
[$name:ident: $ty:ty]) => { [$name:ident: $ty:ty]) => {
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for $ty { impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for $ty {
#[inline] #[inline]
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
decode_arena_allocable(decoder) decode_arena_allocable(decoder)
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [$ty] { impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [$ty] {
#[inline] #[inline]
fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> { fn decode(decoder: &mut D) -> &'tcx Self {
decode_arena_allocable_slice(decoder) decode_arena_allocable_slice(decoder)
} }
} }
@ -456,8 +450,6 @@ macro_rules! implement_ty_decoder {
use super::$DecoderName; use super::$DecoderName;
impl<$($typaram ),*> Decoder for $DecoderName<$($typaram),*> { impl<$($typaram ),*> Decoder for $DecoderName<$($typaram),*> {
type Error = String;
$crate::__impl_decoder_methods! { $crate::__impl_decoder_methods! {
read_unit -> (); read_unit -> ();
@ -483,13 +475,9 @@ macro_rules! implement_ty_decoder {
} }
#[inline] #[inline]
fn read_raw_bytes_into(&mut self, bytes: &mut [u8]) -> Result<(), Self::Error> { fn read_raw_bytes_into(&mut self, bytes: &mut [u8]) -> () {
self.opaque.read_raw_bytes_into(bytes) self.opaque.read_raw_bytes_into(bytes)
} }
fn error(&mut self, err: &str) -> Self::Error {
self.opaque.error(err)
}
} }
} }
} }
@ -505,9 +493,9 @@ macro_rules! impl_binder_encode_decode {
} }
} }
impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::Binder<'tcx, $t> { impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::Binder<'tcx, $t> {
fn decode(decoder: &mut D) -> Result<Self, D::Error> { fn decode(decoder: &mut D) -> Self {
let bound_vars = Decodable::decode(decoder)?; let bound_vars = Decodable::decode(decoder);
Ok(ty::Binder::bind_with_vars(Decodable::decode(decoder)?, bound_vars)) ty::Binder::bind_with_vars(Decodable::decode(decoder), bound_vars)
} }
} }
)* )*

@@ -147,8 +147,8 @@ impl<S: Encoder> Encodable<S> for ScalarInt {
 }
 impl<D: Decoder> Decodable<D> for ScalarInt {
-    fn decode(d: &mut D) -> Result<ScalarInt, D::Error> {
-        Ok(ScalarInt { data: d.read_u128()?, size: d.read_u8()? })
+    fn decode(d: &mut D) -> ScalarInt {
+        ScalarInt { data: d.read_u128(), size: d.read_u8() }
    }
 }

@ -2786,8 +2786,33 @@ pub trait InternIteratorElement<T, R>: Sized {
impl<T, R> InternIteratorElement<T, R> for T { impl<T, R> InternIteratorElement<T, R> for T {
type Output = R; type Output = R;
fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(
f(&iter.collect::<SmallVec<[_; 8]>>()) mut iter: I,
f: F,
) -> Self::Output {
// This code is hot enough that it's worth specializing for the most
// common length lists, to avoid the overhead of `SmallVec` creation.
// Lengths 0, 1, and 2 typically account for ~95% of cases. We assume
// that if the upper and lower bounds from `size_hint` agree they are
// correct.
match iter.size_hint() {
(0, Some(0)) => {
assert!(iter.next().is_none());
f(&[])
}
(1, Some(1)) => {
let t0 = iter.next().unwrap();
assert!(iter.next().is_none());
f(&[t0])
}
(2, Some(2)) => {
let t0 = iter.next().unwrap();
let t1 = iter.next().unwrap();
assert!(iter.next().is_none());
f(&[t0, t1])
}
_ => f(&iter.collect::<SmallVec<[_; 8]>>()),
}
} }
} }
@ -2797,6 +2822,7 @@ where
{ {
type Output = R; type Output = R;
fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
// This code isn't hot.
f(&iter.cloned().collect::<SmallVec<[_; 8]>>()) f(&iter.cloned().collect::<SmallVec<[_; 8]>>())
} }
} }
@ -2809,10 +2835,14 @@ impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
) -> Self::Output { ) -> Self::Output {
// This code is hot enough that it's worth specializing for the most // This code is hot enough that it's worth specializing for the most
// common length lists, to avoid the overhead of `SmallVec` creation. // common length lists, to avoid the overhead of `SmallVec` creation.
// The match arms are in order of frequency. The 1, 2, and 0 cases are // Lengths 0, 1, and 2 typically account for ~95% of cases. We assume
// typically hit in ~95% of cases. We assume that if the upper and // that if the upper and lower bounds from `size_hint` agree they are
// lower bounds from `size_hint` agree they are correct. // correct.
Ok(match iter.size_hint() { Ok(match iter.size_hint() {
(0, Some(0)) => {
assert!(iter.next().is_none());
f(&[])
}
(1, Some(1)) => { (1, Some(1)) => {
let t0 = iter.next().unwrap()?; let t0 = iter.next().unwrap()?;
assert!(iter.next().is_none()); assert!(iter.next().is_none());
@ -2824,10 +2854,6 @@ impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
assert!(iter.next().is_none()); assert!(iter.next().is_none());
f(&[t0, t1]) f(&[t0, t1])
} }
(0, Some(0)) => {
assert!(iter.next().is_none());
f(&[])
}
_ => f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?), _ => f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?),
}) })
} }

@@ -180,8 +180,8 @@ impl<'tcx, E: TyEncoder<'tcx>> Encodable<E> for GenericArg<'tcx> {
 }
 impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for GenericArg<'tcx> {
-    fn decode(d: &mut D) -> Result<GenericArg<'tcx>, D::Error> {
-        Ok(GenericArgKind::decode(d)?.pack())
+    fn decode(d: &mut D) -> GenericArg<'tcx> {
+        GenericArgKind::decode(d).pack()
     }
 }

@ -163,15 +163,12 @@ impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
// Decode the *position* of the footer, which can be found in the // Decode the *position* of the footer, which can be found in the
// last 8 bytes of the file. // last 8 bytes of the file.
decoder.set_position(data.len() - IntEncodedWithFixedSize::ENCODED_SIZE); decoder.set_position(data.len() - IntEncodedWithFixedSize::ENCODED_SIZE);
let footer_pos = IntEncodedWithFixedSize::decode(&mut decoder) let footer_pos = IntEncodedWithFixedSize::decode(&mut decoder).0 as usize;
.expect("error while trying to decode footer position")
.0 as usize;
// Decode the file footer, which contains all the lookup tables, etc. // Decode the file footer, which contains all the lookup tables, etc.
decoder.set_position(footer_pos); decoder.set_position(footer_pos);
decode_tagged(&mut decoder, TAG_FILE_FOOTER) decode_tagged(&mut decoder, TAG_FILE_FOOTER)
.expect("error while trying to decode footer position")
}; };
Self { Self {
@ -372,7 +369,7 @@ impl<'sess> OnDiskCache<'sess> {
dep_node_index: SerializedDepNodeIndex, dep_node_index: SerializedDepNodeIndex,
) -> QuerySideEffects { ) -> QuerySideEffects {
let side_effects: Option<QuerySideEffects> = let side_effects: Option<QuerySideEffects> =
self.load_indexed(tcx, dep_node_index, &self.prev_side_effects_index, "side_effects"); self.load_indexed(tcx, dep_node_index, &self.prev_side_effects_index);
side_effects.unwrap_or_default() side_effects.unwrap_or_default()
} }
@ -398,7 +395,7 @@ impl<'sess> OnDiskCache<'sess> {
where where
T: for<'a> Decodable<CacheDecoder<'a, 'tcx>>, T: for<'a> Decodable<CacheDecoder<'a, 'tcx>>,
{ {
self.load_indexed(tcx, dep_node_index, &self.query_result_index, "query result") self.load_indexed(tcx, dep_node_index, &self.query_result_index)
} }
/// Stores side effect emitted during computation of an anonymous query. /// Stores side effect emitted during computation of an anonymous query.
@ -423,17 +420,13 @@ impl<'sess> OnDiskCache<'sess> {
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,
dep_node_index: SerializedDepNodeIndex, dep_node_index: SerializedDepNodeIndex,
index: &FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>, index: &FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
debug_tag: &'static str,
) -> Option<T> ) -> Option<T>
where where
T: for<'a> Decodable<CacheDecoder<'a, 'tcx>>, T: for<'a> Decodable<CacheDecoder<'a, 'tcx>>,
{ {
let pos = index.get(&dep_node_index).cloned()?; let pos = index.get(&dep_node_index).cloned()?;
self.with_decoder(tcx, pos, |decoder| match decode_tagged(decoder, dep_node_index) { self.with_decoder(tcx, pos, |decoder| Some(decode_tagged(decoder, dep_node_index)))
Ok(v) => Some(v),
Err(e) => bug!("could not decode cached {}: {}", debug_tag, e),
})
} }
fn with_decoder<'a, 'tcx, T, F: for<'s> FnOnce(&mut CacheDecoder<'s, 'tcx>) -> T>( fn with_decoder<'a, 'tcx, T, F: for<'s> FnOnce(&mut CacheDecoder<'s, 'tcx>) -> T>(
@ -535,7 +528,7 @@ impl<'a, 'tcx> DecoderWithPosition for CacheDecoder<'a, 'tcx> {
// Decodes something that was encoded with `encode_tagged()` and verify that the // Decodes something that was encoded with `encode_tagged()` and verify that the
// tag matches and the correct amount of bytes was read. // tag matches and the correct amount of bytes was read.
fn decode_tagged<D, T, V>(decoder: &mut D, expected_tag: T) -> Result<V, D::Error> fn decode_tagged<D, T, V>(decoder: &mut D, expected_tag: T) -> V
where where
T: Decodable<D> + Eq + std::fmt::Debug, T: Decodable<D> + Eq + std::fmt::Debug,
V: Decodable<D>, V: Decodable<D>,
@ -543,15 +536,15 @@ where
{ {
let start_pos = decoder.position(); let start_pos = decoder.position();
let actual_tag = T::decode(decoder)?; let actual_tag = T::decode(decoder);
assert_eq!(actual_tag, expected_tag); assert_eq!(actual_tag, expected_tag);
let value = V::decode(decoder)?; let value = V::decode(decoder);
let end_pos = decoder.position(); let end_pos = decoder.position();
let expected_len: u64 = Decodable::decode(decoder)?; let expected_len: u64 = Decodable::decode(decoder);
assert_eq!((end_pos - start_pos) as u64, expected_len); assert_eq!((end_pos - start_pos) as u64, expected_len);
Ok(value) value
} }
impl<'a, 'tcx> TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> { impl<'a, 'tcx> TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
@ -572,26 +565,22 @@ impl<'a, 'tcx> TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
self.opaque.data[self.opaque.position()] self.opaque.data[self.opaque.position()]
} }
fn cached_ty_for_shorthand<F>( fn cached_ty_for_shorthand<F>(&mut self, shorthand: usize, or_insert_with: F) -> Ty<'tcx>
&mut self,
shorthand: usize,
or_insert_with: F,
) -> Result<Ty<'tcx>, Self::Error>
where where
F: FnOnce(&mut Self) -> Result<Ty<'tcx>, Self::Error>, F: FnOnce(&mut Self) -> Ty<'tcx>,
{ {
let tcx = self.tcx(); let tcx = self.tcx();
let cache_key = ty::CReaderCacheKey { cnum: None, pos: shorthand }; let cache_key = ty::CReaderCacheKey { cnum: None, pos: shorthand };
if let Some(&ty) = tcx.ty_rcache.borrow().get(&cache_key) { if let Some(&ty) = tcx.ty_rcache.borrow().get(&cache_key) {
return Ok(ty); return ty;
} }
let ty = or_insert_with(self)?; let ty = or_insert_with(self);
// This may overwrite the entry, but it should overwrite with the same value. // This may overwrite the entry, but it should overwrite with the same value.
tcx.ty_rcache.borrow_mut().insert_same(cache_key, ty); tcx.ty_rcache.borrow_mut().insert_same(cache_key, ty);
Ok(ty) ty
} }
fn with_position<F, R>(&mut self, pos: usize, f: F) -> R fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
@ -607,7 +596,7 @@ impl<'a, 'tcx> TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
r r
} }
fn decode_alloc_id(&mut self) -> Result<interpret::AllocId, Self::Error> { fn decode_alloc_id(&mut self) -> interpret::AllocId {
let alloc_decoding_session = self.alloc_decoding_session; let alloc_decoding_session = self.alloc_decoding_session;
alloc_decoding_session.decode_alloc_id(self) alloc_decoding_session.decode_alloc_id(self)
} }
@ -619,35 +608,35 @@ rustc_middle::implement_ty_decoder!(CacheDecoder<'a, 'tcx>);
// when a `CacheDecoder` is passed to `Decodable::decode`. Unfortunately, we have to manually opt // when a `CacheDecoder` is passed to `Decodable::decode`. Unfortunately, we have to manually opt
// into specializations this way, given how `CacheDecoder` and the decoding traits currently work. // into specializations this way, given how `CacheDecoder` and the decoding traits currently work.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Vec<u8> { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Vec<u8> {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
Decodable::decode(&mut d.opaque) Decodable::decode(&mut d.opaque)
} }
} }
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for SyntaxContext { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for SyntaxContext {
fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Self {
let syntax_contexts = decoder.syntax_contexts; let syntax_contexts = decoder.syntax_contexts;
rustc_span::hygiene::decode_syntax_context(decoder, decoder.hygiene_context, |this, id| { rustc_span::hygiene::decode_syntax_context(decoder, decoder.hygiene_context, |this, id| {
// This closure is invoked if we haven't already decoded the data for the `SyntaxContext` we are deserializing. // This closure is invoked if we haven't already decoded the data for the `SyntaxContext` we are deserializing.
// We look up the position of the associated `SyntaxData` and decode it. // We look up the position of the associated `SyntaxData` and decode it.
let pos = syntax_contexts.get(&id).unwrap(); let pos = syntax_contexts.get(&id).unwrap();
this.with_position(pos.to_usize(), |decoder| { this.with_position(pos.to_usize(), |decoder| {
let data: SyntaxContextData = decode_tagged(decoder, TAG_SYNTAX_CONTEXT)?; let data: SyntaxContextData = decode_tagged(decoder, TAG_SYNTAX_CONTEXT);
Ok(data) data
}) })
}) })
} }
} }
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for ExpnId { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for ExpnId {
fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Self {
let hash = ExpnHash::decode(decoder)?; let hash = ExpnHash::decode(decoder);
if hash.is_root() { if hash.is_root() {
return Ok(ExpnId::root()); return ExpnId::root();
} }
if let Some(expn_id) = ExpnId::from_hash(hash) { if let Some(expn_id) = ExpnId::from_hash(hash) {
return Ok(expn_id); return expn_id;
} }
let krate = decoder.tcx.stable_crate_id_to_crate_num(hash.stable_crate_id()); let krate = decoder.tcx.stable_crate_id_to_crate_num(hash.stable_crate_id());
@ -660,7 +649,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for ExpnId {
.unwrap_or_else(|| panic!("Bad hash {:?} (map {:?})", hash, decoder.expn_data)); .unwrap_or_else(|| panic!("Bad hash {:?} (map {:?})", hash, decoder.expn_data));
let data: ExpnData = decoder let data: ExpnData = decoder
.with_position(pos.to_usize(), |decoder| decode_tagged(decoder, TAG_EXPN_DATA))?; .with_position(pos.to_usize(), |decoder| decode_tagged(decoder, TAG_EXPN_DATA));
let expn_id = rustc_span::hygiene::register_local_expn_id(data, hash); let expn_id = rustc_span::hygiene::register_local_expn_id(data, hash);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
@ -687,21 +676,21 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for ExpnId {
}; };
debug_assert_eq!(expn_id.krate, krate); debug_assert_eq!(expn_id.krate, krate);
Ok(expn_id) expn_id
} }
} }
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Span { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Span {
fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Self {
let ctxt = SyntaxContext::decode(decoder)?; let ctxt = SyntaxContext::decode(decoder);
let parent = Option::<LocalDefId>::decode(decoder)?; let parent = Option::<LocalDefId>::decode(decoder);
let tag: u8 = Decodable::decode(decoder)?; let tag: u8 = Decodable::decode(decoder);
if tag == TAG_PARTIAL_SPAN { if tag == TAG_PARTIAL_SPAN {
return Ok(Span::new(BytePos(0), BytePos(0), ctxt, parent)); return Span::new(BytePos(0), BytePos(0), ctxt, parent);
} else if tag == TAG_RELATIVE_SPAN { } else if tag == TAG_RELATIVE_SPAN {
let dlo = u32::decode(decoder)?; let dlo = u32::decode(decoder);
let dto = u32::decode(decoder)?; let dto = u32::decode(decoder);
let enclosing = let enclosing =
decoder.tcx.definitions_untracked().def_span(parent.unwrap()).data_untracked(); decoder.tcx.definitions_untracked().def_span(parent.unwrap()).data_untracked();
@ -712,29 +701,29 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Span {
parent, parent,
); );
return Ok(span); return span;
} else { } else {
debug_assert_eq!(tag, TAG_FULL_SPAN); debug_assert_eq!(tag, TAG_FULL_SPAN);
} }
let file_lo_index = SourceFileIndex::decode(decoder)?; let file_lo_index = SourceFileIndex::decode(decoder);
let line_lo = usize::decode(decoder)?; let line_lo = usize::decode(decoder);
let col_lo = BytePos::decode(decoder)?; let col_lo = BytePos::decode(decoder);
let len = BytePos::decode(decoder)?; let len = BytePos::decode(decoder);
let file_lo = decoder.file_index_to_file(file_lo_index); let file_lo = decoder.file_index_to_file(file_lo_index);
let lo = file_lo.lines[line_lo - 1] + col_lo; let lo = file_lo.lines[line_lo - 1] + col_lo;
let hi = lo + len; let hi = lo + len;
Ok(Span::new(lo, hi, ctxt, parent)) Span::new(lo, hi, ctxt, parent)
} }
} }
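The decoder above fixes a small three-way layout for cached spans; the following enum simply restates it for readability (illustrative only, with plain integer types standing in for `SourceFileIndex` and `BytePos`):

    // Payload read after the SyntaxContext, the optional parent LocalDefId,
    // and the one-byte tag (not a type in this patch, just a summary):
    enum CachedSpanPayload {
        Partial,                          // TAG_PARTIAL_SPAN: decoded as a zero-length span
        Relative { dlo: u32, dto: u32 },  // TAG_RELATIVE_SPAN: offsets from the parent's span
        Full { file_lo: u32, line_lo: usize, col_lo: u32, len: u32 }, // TAG_FULL_SPAN
    }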
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for CrateNum { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for CrateNum {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
let stable_id = StableCrateId::decode(d)?; let stable_id = StableCrateId::decode(d);
let cnum = d.tcx.stable_crate_id_to_crate_num(stable_id); let cnum = d.tcx.stable_crate_id_to_crate_num(stable_id);
Ok(cnum) cnum
} }
} }
@ -743,8 +732,8 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for CrateNum {
// because we would not know how to transform the `DefIndex` to the current // because we would not know how to transform the `DefIndex` to the current
// context. // context.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefIndex { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefIndex {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<DefIndex, String> { fn decode(_d: &mut CacheDecoder<'a, 'tcx>) -> DefIndex {
Err(d.error("trying to decode `DefIndex` outside the context of a `DefId`")) panic!("trying to decode `DefIndex` outside the context of a `DefId`")
} }
} }
@ -752,23 +741,23 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefIndex {
// compilation sessions. We use the `DefPathHash`, which is stable across // compilation sessions. We use the `DefPathHash`, which is stable across
// sessions, to map the old `DefId` to the new one. // sessions, to map the old `DefId` to the new one.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefId { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefId {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
// Load the `DefPathHash` which is what we encoded the `DefId` as. // Load the `DefPathHash` which is what we encoded the `DefId` as.
let def_path_hash = DefPathHash::decode(d)?; let def_path_hash = DefPathHash::decode(d);
// Using the `DefPathHash`, we can lookup the new `DefId`. // Using the `DefPathHash`, we can lookup the new `DefId`.
// Subtle: We only encode a `DefId` as part of a query result. // Subtle: We only encode a `DefId` as part of a query result.
// If we get to this point, then all of the query inputs were green, // If we get to this point, then all of the query inputs were green,
// which means that the definition with this hash is guaranteed to // which means that the definition with this hash is guaranteed to
// still exist in the current compilation session. // still exist in the current compilation session.
Ok(d.tcx().def_path_hash_to_def_id(def_path_hash, &mut || { d.tcx().def_path_hash_to_def_id(def_path_hash, &mut || {
panic!("Failed to convert DefPathHash {:?}", def_path_hash) panic!("Failed to convert DefPathHash {:?}", def_path_hash)
})) })
} }
} }
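The comment above is the whole trick: a `DefId` is persisted as its session-independent `DefPathHash` and resolved against the current session's table on the way back in. A minimal stand-in for that lookup, with a `HashMap` in place of the real `TyCtxt` table (everything here is illustrative, not rustc API):

    use std::collections::HashMap;

    // Stand-in for tcx.def_path_hash_to_def_id (illustrative only).
    struct DefIdTable {
        hash_to_index: HashMap<u128, u32>,
    }

    impl DefIdTable {
        // Resolving against the *current* session is what lets a cached
        // DefId survive across compilation sessions.
        fn def_id_from_hash(&self, def_path_hash: u128) -> u32 {
            *self.hash_to_index
                .get(&def_path_hash)
                .unwrap_or_else(|| panic!("Failed to convert DefPathHash {:?}", def_path_hash))
        }
    }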
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx FxHashSet<LocalDefId> { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx FxHashSet<LocalDefId> {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d) RefDecodable::decode(d)
} }
} }
@ -776,31 +765,31 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx FxHashSet<LocalDefId>
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>>
for &'tcx IndexVec<mir::Promoted, mir::Body<'tcx>> for &'tcx IndexVec<mir::Promoted, mir::Body<'tcx>>
{ {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d) RefDecodable::decode(d)
} }
} }
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [thir::abstract_const::Node<'tcx>] { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [thir::abstract_const::Node<'tcx>] {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d) RefDecodable::decode(d)
} }
} }
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d) RefDecodable::decode(d)
} }
} }
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [rustc_ast::InlineAsmTemplatePiece] { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [rustc_ast::InlineAsmTemplatePiece] {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d) RefDecodable::decode(d)
} }
} }
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [Span] { impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [Span] {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> { fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d) RefDecodable::decode(d)
} }
} }


@ -100,7 +100,7 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
for SerializedDepGraph<K> for SerializedDepGraph<K>
{ {
#[instrument(level = "debug", skip(d))] #[instrument(level = "debug", skip(d))]
fn decode(d: &mut opaque::Decoder<'a>) -> Result<SerializedDepGraph<K>, String> { fn decode(d: &mut opaque::Decoder<'a>) -> SerializedDepGraph<K> {
let start_position = d.position(); let start_position = d.position();
// The last 16 bytes are the node count and edge count. // The last 16 bytes are the node count and edge count.
@ -108,8 +108,8 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
d.set_position(d.data.len() - 2 * IntEncodedWithFixedSize::ENCODED_SIZE); d.set_position(d.data.len() - 2 * IntEncodedWithFixedSize::ENCODED_SIZE);
debug!("position: {:?}", d.position()); debug!("position: {:?}", d.position());
let node_count = IntEncodedWithFixedSize::decode(d)?.0 as usize; let node_count = IntEncodedWithFixedSize::decode(d).0 as usize;
let edge_count = IntEncodedWithFixedSize::decode(d)?.0 as usize; let edge_count = IntEncodedWithFixedSize::decode(d).0 as usize;
debug!(?node_count, ?edge_count); debug!(?node_count, ?edge_count);
debug!("position: {:?}", d.position()); debug!("position: {:?}", d.position());
@ -123,12 +123,12 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
for _index in 0..node_count { for _index in 0..node_count {
d.read_struct(|d| { d.read_struct(|d| {
let dep_node: DepNode<K> = d.read_struct_field("node", Decodable::decode)?; let dep_node: DepNode<K> = d.read_struct_field("node", Decodable::decode);
let _i: SerializedDepNodeIndex = nodes.push(dep_node); let _i: SerializedDepNodeIndex = nodes.push(dep_node);
debug_assert_eq!(_i.index(), _index); debug_assert_eq!(_i.index(), _index);
let fingerprint: Fingerprint = let fingerprint: Fingerprint =
d.read_struct_field("fingerprint", Decodable::decode)?; d.read_struct_field("fingerprint", Decodable::decode);
let _i: SerializedDepNodeIndex = fingerprints.push(fingerprint); let _i: SerializedDepNodeIndex = fingerprints.push(fingerprint);
debug_assert_eq!(_i.index(), _index); debug_assert_eq!(_i.index(), _index);
@ -136,22 +136,22 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
d.read_seq(|d, len| { d.read_seq(|d, len| {
let start = edge_list_data.len().try_into().unwrap(); let start = edge_list_data.len().try_into().unwrap();
for _ in 0..len { for _ in 0..len {
let edge = d.read_seq_elt(Decodable::decode)?; let edge = d.read_seq_elt(Decodable::decode);
edge_list_data.push(edge); edge_list_data.push(edge);
} }
let end = edge_list_data.len().try_into().unwrap(); let end = edge_list_data.len().try_into().unwrap();
let _i: SerializedDepNodeIndex = edge_list_indices.push((start, end)); let _i: SerializedDepNodeIndex = edge_list_indices.push((start, end));
debug_assert_eq!(_i.index(), _index); debug_assert_eq!(_i.index(), _index);
Ok(()) ()
}) })
}) })
})?; });
} }
let index: FxHashMap<_, _> = let index: FxHashMap<_, _> =
nodes.iter_enumerated().map(|(idx, &dep_node)| (dep_node, idx)).collect(); nodes.iter_enumerated().map(|(idx, &dep_node)| (dep_node, idx)).collect();
Ok(SerializedDepGraph { nodes, fingerprints, edge_list_indices, edge_list_data, index }) SerializedDepGraph { nodes, fingerprints, edge_list_indices, edge_list_data, index }
} }
} }
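The `set_position(d.data.len() - 2 * ENCODED_SIZE)` dance works because the two counts are written as fixed-size trailers: the reader jumps to the end, reads them, then rewinds to `start_position` to decode the node records. A standalone demo of just that trailer read (illustrative; the record format itself is the one decoded above):

    use std::convert::TryInto;

    // Read the two u64 counts stored as the last 16 little-endian bytes.
    fn read_trailer_counts(data: &[u8]) -> (u64, u64) {
        let n = data.len();
        let node_count = u64::from_le_bytes(data[n - 16..n - 8].try_into().unwrap());
        let edge_count = u64::from_le_bytes(data[n - 8..].try_into().unwrap());
        (node_count, edge_count)
    }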


@ -17,15 +17,8 @@ impl<S: Encoder, A: Array<Item: Encodable<S>>> Encodable<S> for SmallVec<A> {
} }
impl<D: Decoder, A: Array<Item: Decodable<D>>> Decodable<D> for SmallVec<A> { impl<D: Decoder, A: Array<Item: Decodable<D>>> Decodable<D> for SmallVec<A> {
fn decode(d: &mut D) -> Result<SmallVec<A>, D::Error> { fn decode(d: &mut D) -> SmallVec<A> {
d.read_seq(|d, len| { d.read_seq(|d, len| (0..len).map(|_| d.read_seq_elt(|d| Decodable::decode(d))).collect())
let mut vec = SmallVec::with_capacity(len);
// FIXME(#48994) - could just be collected into a Result<SmallVec, D::Error>
for _ in 0..len {
vec.push(d.read_seq_elt(|d| Decodable::decode(d))?);
}
Ok(vec)
})
} }
} }
@ -41,14 +34,8 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for LinkedList<T> {
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for LinkedList<T> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for LinkedList<T> {
fn decode(d: &mut D) -> Result<LinkedList<T>, D::Error> { fn decode(d: &mut D) -> LinkedList<T> {
d.read_seq(|d, len| { d.read_seq(|d, len| (0..len).map(|_| d.read_seq_elt(|d| Decodable::decode(d))).collect())
let mut list = LinkedList::new();
for _ in 0..len {
list.push_back(d.read_seq_elt(|d| Decodable::decode(d))?);
}
Ok(list)
})
} }
} }
@ -64,14 +51,8 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for VecDeque<T> {
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for VecDeque<T> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for VecDeque<T> {
fn decode(d: &mut D) -> Result<VecDeque<T>, D::Error> { fn decode(d: &mut D) -> VecDeque<T> {
d.read_seq(|d, len| { d.read_seq(|d, len| (0..len).map(|_| d.read_seq_elt(|d| Decodable::decode(d))).collect())
let mut deque: VecDeque<T> = VecDeque::with_capacity(len);
for _ in 0..len {
deque.push_back(d.read_seq_elt(|d| Decodable::decode(d))?);
}
Ok(deque)
})
} }
} }
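The three impls above all reduce to the same shape: read the length, decode `len` elements, and let `collect` build the collection. A generic form of that pattern, for any `FromIterator` target (a sketch, not something added by this patch):

    // Works for Vec, VecDeque, LinkedList, SmallVec, BTreeSet, ... anything
    // implementing FromIterator<T> (illustrative helper only).
    fn decode_collection<D: Decoder, T: Decodable<D>, C: FromIterator<T>>(d: &mut D) -> C {
        d.read_seq(|d, len| (0..len).map(|_| d.read_seq_elt(|d| T::decode(d))).collect())
    }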
@ -96,15 +77,15 @@ where
K: Decodable<D> + PartialEq + Ord, K: Decodable<D> + PartialEq + Ord,
V: Decodable<D>, V: Decodable<D>,
{ {
fn decode(d: &mut D) -> Result<BTreeMap<K, V>, D::Error> { fn decode(d: &mut D) -> BTreeMap<K, V> {
d.read_map(|d, len| { d.read_map(|d, len| {
let mut map = BTreeMap::new(); let mut map = BTreeMap::new();
for _ in 0..len { for _ in 0..len {
let key = d.read_map_elt_key(|d| Decodable::decode(d))?; let key = d.read_map_elt_key(|d| Decodable::decode(d));
let val = d.read_map_elt_val(|d| Decodable::decode(d))?; let val = d.read_map_elt_val(|d| Decodable::decode(d));
map.insert(key, val); map.insert(key, val);
} }
Ok(map) map
}) })
} }
} }
@ -127,13 +108,13 @@ impl<D: Decoder, T> Decodable<D> for BTreeSet<T>
where where
T: Decodable<D> + PartialEq + Ord, T: Decodable<D> + PartialEq + Ord,
{ {
fn decode(d: &mut D) -> Result<BTreeSet<T>, D::Error> { fn decode(d: &mut D) -> BTreeSet<T> {
d.read_seq(|d, len| { d.read_seq(|d, len| {
let mut set = BTreeSet::new(); let mut set = BTreeSet::new();
for _ in 0..len { for _ in 0..len {
set.insert(d.read_seq_elt(|d| Decodable::decode(d))?); set.insert(d.read_seq_elt(|d| Decodable::decode(d)));
} }
Ok(set) set
}) })
} }
} }
@ -161,16 +142,16 @@ where
V: Decodable<D>, V: Decodable<D>,
S: BuildHasher + Default, S: BuildHasher + Default,
{ {
fn decode(d: &mut D) -> Result<HashMap<K, V, S>, D::Error> { fn decode(d: &mut D) -> HashMap<K, V, S> {
d.read_map(|d, len| { d.read_map(|d, len| {
let state = Default::default(); let state = Default::default();
let mut map = HashMap::with_capacity_and_hasher(len, state); let mut map = HashMap::with_capacity_and_hasher(len, state);
for _ in 0..len { for _ in 0..len {
let key = d.read_map_elt_key(|d| Decodable::decode(d))?; let key = d.read_map_elt_key(|d| Decodable::decode(d));
let val = d.read_map_elt_val(|d| Decodable::decode(d))?; let val = d.read_map_elt_val(|d| Decodable::decode(d));
map.insert(key, val); map.insert(key, val);
} }
Ok(map) map
}) })
} }
} }
@ -205,14 +186,14 @@ where
T: Decodable<D> + Hash + Eq, T: Decodable<D> + Hash + Eq,
S: BuildHasher + Default, S: BuildHasher + Default,
{ {
fn decode(d: &mut D) -> Result<HashSet<T, S>, D::Error> { fn decode(d: &mut D) -> HashSet<T, S> {
d.read_seq(|d, len| { d.read_seq(|d, len| {
let state = Default::default(); let state = Default::default();
let mut set = HashSet::with_capacity_and_hasher(len, state); let mut set = HashSet::with_capacity_and_hasher(len, state);
for _ in 0..len { for _ in 0..len {
set.insert(d.read_seq_elt(|d| Decodable::decode(d))?); set.insert(d.read_seq_elt(|d| Decodable::decode(d)));
} }
Ok(set) set
}) })
} }
} }
@ -240,16 +221,16 @@ where
V: Decodable<D>, V: Decodable<D>,
S: BuildHasher + Default, S: BuildHasher + Default,
{ {
fn decode(d: &mut D) -> Result<indexmap::IndexMap<K, V, S>, D::Error> { fn decode(d: &mut D) -> indexmap::IndexMap<K, V, S> {
d.read_map(|d, len| { d.read_map(|d, len| {
let state = Default::default(); let state = Default::default();
let mut map = indexmap::IndexMap::with_capacity_and_hasher(len, state); let mut map = indexmap::IndexMap::with_capacity_and_hasher(len, state);
for _ in 0..len { for _ in 0..len {
let key = d.read_map_elt_key(|d| Decodable::decode(d))?; let key = d.read_map_elt_key(|d| Decodable::decode(d));
let val = d.read_map_elt_val(|d| Decodable::decode(d))?; let val = d.read_map_elt_val(|d| Decodable::decode(d));
map.insert(key, val); map.insert(key, val);
} }
Ok(map) map
}) })
} }
} }
@ -274,14 +255,14 @@ where
T: Decodable<D> + Hash + Eq, T: Decodable<D> + Hash + Eq,
S: BuildHasher + Default, S: BuildHasher + Default,
{ {
fn decode(d: &mut D) -> Result<indexmap::IndexSet<T, S>, D::Error> { fn decode(d: &mut D) -> indexmap::IndexSet<T, S> {
d.read_seq(|d, len| { d.read_seq(|d, len| {
let state = Default::default(); let state = Default::default();
let mut set = indexmap::IndexSet::with_capacity_and_hasher(len, state); let mut set = indexmap::IndexSet::with_capacity_and_hasher(len, state);
for _ in 0..len { for _ in 0..len {
set.insert(d.read_seq_elt(|d| Decodable::decode(d))?); set.insert(d.read_seq_elt(|d| Decodable::decode(d)));
} }
Ok(set) set
}) })
} }
} }
@ -294,9 +275,9 @@ impl<E: Encoder, T: Encodable<E>> Encodable<E> for Rc<[T]> {
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for Rc<[T]> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for Rc<[T]> {
fn decode(d: &mut D) -> Result<Rc<[T]>, D::Error> { fn decode(d: &mut D) -> Rc<[T]> {
let vec: Vec<T> = Decodable::decode(d)?; let vec: Vec<T> = Decodable::decode(d);
Ok(vec.into()) vec.into()
} }
} }
@ -308,8 +289,8 @@ impl<E: Encoder, T: Encodable<E>> Encodable<E> for Arc<[T]> {
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for Arc<[T]> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for Arc<[T]> {
fn decode(d: &mut D) -> Result<Arc<[T]>, D::Error> { fn decode(d: &mut D) -> Arc<[T]> {
let vec: Vec<T> = Decodable::decode(d)?; let vec: Vec<T> = Decodable::decode(d);
Ok(vec.into()) vec.into()
} }
} }


@ -89,7 +89,7 @@
//! let encoded = json::encode(&object).unwrap(); //! let encoded = json::encode(&object).unwrap();
//! //!
//! // Deserialize using `json::decode` //! // Deserialize using `json::decode`
//! let decoded: TestStruct = json::decode(&encoded[..]).unwrap(); //! let decoded: TestStruct = json::decode(&encoded[..]);
//! ``` //! ```
//! //!
//! ## Using the `ToJson` trait //! ## Using the `ToJson` trait
@ -173,7 +173,7 @@
//! let json_str: String = json_obj.to_string(); //! let json_str: String = json_obj.to_string();
//! //!
//! // Deserialize like before //! // Deserialize like before
//! let decoded: TestStruct = json::decode(&json_str).unwrap(); //! let decoded: TestStruct = json::decode(&json_str);
//! ``` //! ```
use self::DecoderError::*; use self::DecoderError::*;
@ -265,6 +265,12 @@ pub enum DecoderError {
ApplicationError(string::String), ApplicationError(string::String),
} }
macro_rules! bad {
($e:expr) => {{
panic!("json decode error: {:?}", $e);
}};
}
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
pub enum EncoderError { pub enum EncoderError {
FmtError(fmt::Error), FmtError(fmt::Error),
@ -295,10 +301,10 @@ pub fn error_str(error: ErrorCode) -> &'static str {
} }
/// Shortcut function to decode a JSON `&str` into an object /// Shortcut function to decode a JSON `&str` into an object
pub fn decode<T: crate::Decodable<Decoder>>(s: &str) -> DecodeResult<T> { pub fn decode<T: crate::Decodable<Decoder>>(s: &str) -> T {
let json = match from_str(s) { let json = match from_str(s) {
Ok(x) => x, Ok(x) => x,
Err(e) => return Err(ParseError(e)), Err(e) => bad!(ParseError(e)),
}; };
let mut decoder = Decoder::new(json); let mut decoder = Decoder::new(json);
@ -334,15 +340,6 @@ impl fmt::Display for ParserError {
} }
} }
impl fmt::Display for DecoderError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME this should be a nicer error
fmt::Debug::fmt(self, f)
}
}
impl std::error::Error for DecoderError {}
impl fmt::Display for EncoderError { impl fmt::Display for EncoderError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME this should be a nicer error // FIXME this should be a nicer error
@ -2206,41 +2203,39 @@ impl Decoder {
macro_rules! expect { macro_rules! expect {
($e:expr, Null) => {{ ($e:expr, Null) => {{
match $e { match $e {
Json::Null => Ok(()), Json::Null => (),
other => Err(ExpectedError("Null".to_owned(), other.to_string())), other => bad!(ExpectedError("Null".to_owned(), other.to_string())),
} }
}}; }};
($e:expr, $t:ident) => {{ ($e:expr, $t:ident) => {{
match $e { match $e {
Json::$t(v) => Ok(v), Json::$t(v) => v,
other => Err(ExpectedError(stringify!($t).to_owned(), other.to_string())), other => bad!(ExpectedError(stringify!($t).to_owned(), other.to_string())),
} }
}}; }};
} }
macro_rules! read_primitive { macro_rules! read_primitive {
($name:ident, $ty:ty) => { ($name:ident, $ty:ty) => {
fn $name(&mut self) -> DecodeResult<$ty> { fn $name(&mut self) -> $ty {
match self.pop() { match self.pop() {
Json::I64(f) => Ok(f as $ty), Json::I64(f) => f as $ty,
Json::U64(f) => Ok(f as $ty), Json::U64(f) => f as $ty,
Json::F64(f) => Err(ExpectedError("Integer".to_owned(), f.to_string())), Json::F64(f) => bad!(ExpectedError("Integer".to_owned(), f.to_string())),
// re: #12967.. a type w/ numeric keys (ie HashMap<usize, V> etc) // re: #12967.. a type w/ numeric keys (ie HashMap<usize, V> etc)
// is going to have a string here, as per JSON spec. // is going to have a string here, as per JSON spec.
Json::String(s) => match s.parse().ok() { Json::String(s) => match s.parse().ok() {
Some(f) => Ok(f), Some(f) => f,
None => Err(ExpectedError("Number".to_owned(), s)), None => bad!(ExpectedError("Number".to_owned(), s)),
}, },
value => Err(ExpectedError("Number".to_owned(), value.to_string())), value => bad!(ExpectedError("Number".to_owned(), value.to_string())),
} }
} }
}; };
} }
impl crate::Decoder for Decoder { impl crate::Decoder for Decoder {
type Error = DecoderError; fn read_unit(&mut self) -> () {
fn read_unit(&mut self) -> DecodeResult<()> {
expect!(self.pop(), Null) expect!(self.pop(), Null)
} }
@ -2257,156 +2252,151 @@ impl crate::Decoder for Decoder {
read_primitive! { read_i64, i64 } read_primitive! { read_i64, i64 }
read_primitive! { read_i128, i128 } read_primitive! { read_i128, i128 }
fn read_f32(&mut self) -> DecodeResult<f32> { fn read_f32(&mut self) -> f32 {
self.read_f64().map(|x| x as f32) self.read_f64() as f32
} }
fn read_f64(&mut self) -> DecodeResult<f64> { fn read_f64(&mut self) -> f64 {
match self.pop() { match self.pop() {
Json::I64(f) => Ok(f as f64), Json::I64(f) => f as f64,
Json::U64(f) => Ok(f as f64), Json::U64(f) => f as f64,
Json::F64(f) => Ok(f), Json::F64(f) => f,
Json::String(s) => { Json::String(s) => {
// re: #12967.. a type w/ numeric keys (ie HashMap<usize, V> etc) // re: #12967.. a type w/ numeric keys (ie HashMap<usize, V> etc)
// is going to have a string here, as per JSON spec. // is going to have a string here, as per JSON spec.
match s.parse().ok() { match s.parse().ok() {
Some(f) => Ok(f), Some(f) => f,
None => Err(ExpectedError("Number".to_owned(), s)), None => bad!(ExpectedError("Number".to_owned(), s)),
} }
} }
Json::Null => Ok(f64::NAN), Json::Null => f64::NAN,
value => Err(ExpectedError("Number".to_owned(), value.to_string())), value => bad!(ExpectedError("Number".to_owned(), value.to_string())),
} }
} }
fn read_bool(&mut self) -> DecodeResult<bool> { fn read_bool(&mut self) -> bool {
expect!(self.pop(), Boolean) expect!(self.pop(), Boolean)
} }
fn read_char(&mut self) -> DecodeResult<char> { fn read_char(&mut self) -> char {
let s = self.read_str()?; let s = self.read_str();
{ let mut it = s.chars();
let mut it = s.chars(); if let (Some(c), None) = (it.next(), it.next()) {
if let (Some(c), None) = (it.next(), it.next()) { // exactly one character
// exactly one character return c;
return Ok(c);
}
} }
Err(ExpectedError("single character string".to_owned(), s.to_string())) bad!(ExpectedError("single character string".to_owned(), s.to_string()));
} }
fn read_str(&mut self) -> DecodeResult<Cow<'_, str>> { fn read_str(&mut self) -> Cow<'_, str> {
expect!(self.pop(), String).map(Cow::Owned) Cow::Owned(expect!(self.pop(), String))
} }
fn read_raw_bytes_into(&mut self, s: &mut [u8]) -> Result<(), Self::Error> { fn read_raw_bytes_into(&mut self, s: &mut [u8]) {
for c in s.iter_mut() { for c in s.iter_mut() {
*c = self.read_u8()?; *c = self.read_u8();
} }
Ok(()) ()
} }
fn read_enum<T, F>(&mut self, f: F) -> DecodeResult<T> fn read_enum<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Decoder) -> DecodeResult<T>, F: FnOnce(&mut Decoder) -> T,
{ {
f(self) f(self)
} }
fn read_enum_variant<T, F>(&mut self, names: &[&str], mut f: F) -> DecodeResult<T> fn read_enum_variant<T, F>(&mut self, names: &[&str], mut f: F) -> T
where where
F: FnMut(&mut Decoder, usize) -> DecodeResult<T>, F: FnMut(&mut Decoder, usize) -> T,
{ {
let name = match self.pop() { let name = match self.pop() {
Json::String(s) => s, Json::String(s) => s,
Json::Object(mut o) => { Json::Object(mut o) => {
let n = match o.remove("variant") { let n = match o.remove("variant") {
Some(Json::String(s)) => s, Some(Json::String(s)) => s,
Some(val) => return Err(ExpectedError("String".to_owned(), val.to_string())), Some(val) => bad!(ExpectedError("String".to_owned(), val.to_string())),
None => return Err(MissingFieldError("variant".to_owned())), None => bad!(MissingFieldError("variant".to_owned())),
}; };
match o.remove("fields") { match o.remove("fields") {
Some(Json::Array(l)) => { Some(Json::Array(l)) => {
self.stack.extend(l.into_iter().rev()); self.stack.extend(l.into_iter().rev());
} }
Some(val) => return Err(ExpectedError("Array".to_owned(), val.to_string())), Some(val) => bad!(ExpectedError("Array".to_owned(), val.to_string())),
None => return Err(MissingFieldError("fields".to_owned())), None => bad!(MissingFieldError("fields".to_owned())),
} }
n n
} }
json => return Err(ExpectedError("String or Object".to_owned(), json.to_string())), json => bad!(ExpectedError("String or Object".to_owned(), json.to_string())),
}; };
let idx = match names.iter().position(|n| *n == &name[..]) { let idx = match names.iter().position(|n| *n == &name[..]) {
Some(idx) => idx, Some(idx) => idx,
None => return Err(UnknownVariantError(name)), None => bad!(UnknownVariantError(name)),
}; };
f(self, idx) f(self, idx)
} }
fn read_enum_variant_arg<T, F>(&mut self, f: F) -> DecodeResult<T> fn read_enum_variant_arg<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Decoder) -> DecodeResult<T>, F: FnOnce(&mut Decoder) -> T,
{ {
f(self) f(self)
} }
fn read_struct<T, F>(&mut self, f: F) -> DecodeResult<T> fn read_struct<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Decoder) -> DecodeResult<T>, F: FnOnce(&mut Decoder) -> T,
{ {
let value = f(self)?; let value = f(self);
self.pop(); self.pop();
Ok(value) value
} }
fn read_struct_field<T, F>(&mut self, name: &str, f: F) -> DecodeResult<T> fn read_struct_field<T, F>(&mut self, name: &str, f: F) -> T
where where
F: FnOnce(&mut Decoder) -> DecodeResult<T>, F: FnOnce(&mut Decoder) -> T,
{ {
let mut obj = expect!(self.pop(), Object)?; let mut obj = expect!(self.pop(), Object);
let value = match obj.remove(name) { let value = match obj.remove(name) {
None => { None => {
// Add a Null and try to parse it as an Option<_> // Add a Null and try to parse it as an Option<_>
// to get None as a default value. // to get None as a default value.
self.stack.push(Json::Null); self.stack.push(Json::Null);
match f(self) { f(self)
Ok(x) => x,
Err(_) => return Err(MissingFieldError(name.to_string())),
}
} }
Some(json) => { Some(json) => {
self.stack.push(json); self.stack.push(json);
f(self)? f(self)
} }
}; };
self.stack.push(Json::Object(obj)); self.stack.push(Json::Object(obj));
Ok(value) value
} }
fn read_tuple<T, F>(&mut self, tuple_len: usize, f: F) -> DecodeResult<T> fn read_tuple<T, F>(&mut self, tuple_len: usize, f: F) -> T
where where
F: FnOnce(&mut Decoder) -> DecodeResult<T>, F: FnOnce(&mut Decoder) -> T,
{ {
self.read_seq(move |d, len| { self.read_seq(move |d, len| {
if len == tuple_len { if len == tuple_len {
f(d) f(d)
} else { } else {
Err(ExpectedError(format!("Tuple{}", tuple_len), format!("Tuple{}", len))) bad!(ExpectedError(format!("Tuple{}", tuple_len), format!("Tuple{}", len)));
} }
}) })
} }
fn read_tuple_arg<T, F>(&mut self, f: F) -> DecodeResult<T> fn read_tuple_arg<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Decoder) -> DecodeResult<T>, F: FnOnce(&mut Decoder) -> T,
{ {
self.read_seq_elt(f) self.read_seq_elt(f)
} }
fn read_option<T, F>(&mut self, mut f: F) -> DecodeResult<T> fn read_option<T, F>(&mut self, mut f: F) -> T
where where
F: FnMut(&mut Decoder, bool) -> DecodeResult<T>, F: FnMut(&mut Decoder, bool) -> T,
{ {
match self.pop() { match self.pop() {
Json::Null => f(self, false), Json::Null => f(self, false),
@ -2417,28 +2407,28 @@ impl crate::Decoder for Decoder {
} }
} }
fn read_seq<T, F>(&mut self, f: F) -> DecodeResult<T> fn read_seq<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Decoder, usize) -> DecodeResult<T>, F: FnOnce(&mut Decoder, usize) -> T,
{ {
let array = expect!(self.pop(), Array)?; let array = expect!(self.pop(), Array);
let len = array.len(); let len = array.len();
self.stack.extend(array.into_iter().rev()); self.stack.extend(array.into_iter().rev());
f(self, len) f(self, len)
} }
fn read_seq_elt<T, F>(&mut self, f: F) -> DecodeResult<T> fn read_seq_elt<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Decoder) -> DecodeResult<T>, F: FnOnce(&mut Decoder) -> T,
{ {
f(self) f(self)
} }
fn read_map<T, F>(&mut self, f: F) -> DecodeResult<T> fn read_map<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Decoder, usize) -> DecodeResult<T>, F: FnOnce(&mut Decoder, usize) -> T,
{ {
let obj = expect!(self.pop(), Object)?; let obj = expect!(self.pop(), Object);
let len = obj.len(); let len = obj.len();
for (key, value) in obj { for (key, value) in obj {
self.stack.push(value); self.stack.push(value);
@ -2447,23 +2437,19 @@ impl crate::Decoder for Decoder {
f(self, len) f(self, len)
} }
fn read_map_elt_key<T, F>(&mut self, f: F) -> DecodeResult<T> fn read_map_elt_key<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Decoder) -> DecodeResult<T>, F: FnOnce(&mut Decoder) -> T,
{ {
f(self) f(self)
} }
fn read_map_elt_val<T, F>(&mut self, f: F) -> DecodeResult<T> fn read_map_elt_val<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Decoder) -> DecodeResult<T>, F: FnOnce(&mut Decoder) -> T,
{ {
f(self) f(self)
} }
fn error(&mut self, err: &str) -> DecoderError {
ApplicationError(err.to_string())
}
} }
/// A trait for converting values to JSON /// A trait for converting values to JSON


@ -560,134 +560,127 @@ impl<'a> Decoder<'a> {
} }
macro_rules! read_leb128 { macro_rules! read_leb128 {
($dec:expr, $fun:ident) => {{ Ok(leb128::$fun($dec.data, &mut $dec.position)) }}; ($dec:expr, $fun:ident) => {{ leb128::$fun($dec.data, &mut $dec.position) }};
} }
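The `leb128::read_*_leb128` helpers take the byte slice plus a mutable position and advance it past however many bytes the value used, which is why the macro no longer needs to wrap anything in `Ok`. For reference, a standalone sketch of unsigned LEB128 decoding (illustrative; the real helpers in `rustc_serialize::leb128` are macro-generated per integer type):

    // Unsigned LEB128: 7 payload bits per byte; a set high bit means "more bytes follow".
    fn read_uleb128(data: &[u8], position: &mut usize) -> u64 {
        let mut result = 0u64;
        let mut shift = 0;
        loop {
            let byte = data[*position];
            *position += 1;
            result |= u64::from(byte & 0x7f) << shift;
            if byte & 0x80 == 0 {
                return result;
            }
            shift += 7;
        }
    }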
impl<'a> serialize::Decoder for Decoder<'a> { impl<'a> serialize::Decoder for Decoder<'a> {
type Error = String;
#[inline] #[inline]
fn read_unit(&mut self) -> Result<(), Self::Error> { fn read_unit(&mut self) -> () {
Ok(()) ()
} }
#[inline] #[inline]
fn read_u128(&mut self) -> Result<u128, Self::Error> { fn read_u128(&mut self) -> u128 {
read_leb128!(self, read_u128_leb128) read_leb128!(self, read_u128_leb128)
} }
#[inline] #[inline]
fn read_u64(&mut self) -> Result<u64, Self::Error> { fn read_u64(&mut self) -> u64 {
read_leb128!(self, read_u64_leb128) read_leb128!(self, read_u64_leb128)
} }
#[inline] #[inline]
fn read_u32(&mut self) -> Result<u32, Self::Error> { fn read_u32(&mut self) -> u32 {
read_leb128!(self, read_u32_leb128) read_leb128!(self, read_u32_leb128)
} }
#[inline] #[inline]
fn read_u16(&mut self) -> Result<u16, Self::Error> { fn read_u16(&mut self) -> u16 {
let bytes = [self.data[self.position], self.data[self.position + 1]]; let bytes = [self.data[self.position], self.data[self.position + 1]];
let value = u16::from_le_bytes(bytes); let value = u16::from_le_bytes(bytes);
self.position += 2; self.position += 2;
Ok(value) value
} }
#[inline] #[inline]
fn read_u8(&mut self) -> Result<u8, Self::Error> { fn read_u8(&mut self) -> u8 {
let value = self.data[self.position]; let value = self.data[self.position];
self.position += 1; self.position += 1;
Ok(value) value
} }
#[inline] #[inline]
fn read_usize(&mut self) -> Result<usize, Self::Error> { fn read_usize(&mut self) -> usize {
read_leb128!(self, read_usize_leb128) read_leb128!(self, read_usize_leb128)
} }
#[inline] #[inline]
fn read_i128(&mut self) -> Result<i128, Self::Error> { fn read_i128(&mut self) -> i128 {
read_leb128!(self, read_i128_leb128) read_leb128!(self, read_i128_leb128)
} }
#[inline] #[inline]
fn read_i64(&mut self) -> Result<i64, Self::Error> { fn read_i64(&mut self) -> i64 {
read_leb128!(self, read_i64_leb128) read_leb128!(self, read_i64_leb128)
} }
#[inline] #[inline]
fn read_i32(&mut self) -> Result<i32, Self::Error> { fn read_i32(&mut self) -> i32 {
read_leb128!(self, read_i32_leb128) read_leb128!(self, read_i32_leb128)
} }
#[inline] #[inline]
fn read_i16(&mut self) -> Result<i16, Self::Error> { fn read_i16(&mut self) -> i16 {
let bytes = [self.data[self.position], self.data[self.position + 1]]; let bytes = [self.data[self.position], self.data[self.position + 1]];
let value = i16::from_le_bytes(bytes); let value = i16::from_le_bytes(bytes);
self.position += 2; self.position += 2;
Ok(value) value
} }
#[inline] #[inline]
fn read_i8(&mut self) -> Result<i8, Self::Error> { fn read_i8(&mut self) -> i8 {
let as_u8 = self.data[self.position]; let as_u8 = self.data[self.position];
self.position += 1; self.position += 1;
unsafe { Ok(::std::mem::transmute(as_u8)) } unsafe { ::std::mem::transmute(as_u8) }
} }
#[inline] #[inline]
fn read_isize(&mut self) -> Result<isize, Self::Error> { fn read_isize(&mut self) -> isize {
read_leb128!(self, read_isize_leb128) read_leb128!(self, read_isize_leb128)
} }
#[inline] #[inline]
fn read_bool(&mut self) -> Result<bool, Self::Error> { fn read_bool(&mut self) -> bool {
let value = self.read_u8()?; let value = self.read_u8();
Ok(value != 0) value != 0
} }
#[inline] #[inline]
fn read_f64(&mut self) -> Result<f64, Self::Error> { fn read_f64(&mut self) -> f64 {
let bits = self.read_u64()?; let bits = self.read_u64();
Ok(f64::from_bits(bits)) f64::from_bits(bits)
} }
#[inline] #[inline]
fn read_f32(&mut self) -> Result<f32, Self::Error> { fn read_f32(&mut self) -> f32 {
let bits = self.read_u32()?; let bits = self.read_u32();
Ok(f32::from_bits(bits)) f32::from_bits(bits)
} }
#[inline] #[inline]
fn read_char(&mut self) -> Result<char, Self::Error> { fn read_char(&mut self) -> char {
let bits = self.read_u32()?; let bits = self.read_u32();
Ok(std::char::from_u32(bits).unwrap()) std::char::from_u32(bits).unwrap()
} }
#[inline] #[inline]
fn read_str(&mut self) -> Result<Cow<'_, str>, Self::Error> { fn read_str(&mut self) -> Cow<'_, str> {
let len = self.read_usize()?; let len = self.read_usize();
let sentinel = self.data[self.position + len]; let sentinel = self.data[self.position + len];
assert!(sentinel == STR_SENTINEL); assert!(sentinel == STR_SENTINEL);
let s = unsafe { let s = unsafe {
std::str::from_utf8_unchecked(&self.data[self.position..self.position + len]) std::str::from_utf8_unchecked(&self.data[self.position..self.position + len])
}; };
self.position += len + 1; self.position += len + 1;
Ok(Cow::Borrowed(s)) Cow::Borrowed(s)
} }
#[inline] #[inline]
fn error(&mut self, err: &str) -> Self::Error { fn read_raw_bytes_into(&mut self, s: &mut [u8]) -> () {
err.to_string()
}
#[inline]
fn read_raw_bytes_into(&mut self, s: &mut [u8]) -> Result<(), String> {
let start = self.position; let start = self.position;
self.position += s.len(); self.position += s.len();
s.copy_from_slice(&self.data[start..self.position]); s.copy_from_slice(&self.data[start..self.position]);
Ok(()) ()
} }
} }
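The `read_str` method a few lines up implies the string layout: a length prefix, the UTF-8 bytes, then a sentinel byte that the reader asserts on before handing out a borrowed `&str`. A rough sketch of the writing side with a plain buffer (the sentinel value and the fixed-width length prefix here are assumptions; the real prefix is the usual LEB128 `usize`):

    const STR_SENTINEL: u8 = 0xC1; // assumed value: some byte that can never occur in UTF-8

    fn write_str_demo(out: &mut Vec<u8>, s: &str) {
        out.extend_from_slice(&(s.len() as u64).to_le_bytes()); // length prefix (stand-in)
        out.extend_from_slice(s.as_bytes());                    // the string bytes themselves
        out.push(STR_SENTINEL);                                 // checked by read_str's assert!
    }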
@ -715,9 +708,9 @@ impl serialize::Encodable<FileEncoder> for [u8] {
// Specialize decoding `Vec<u8>`. This specialization also applies to decoding `Box<[u8]>`s, etc., // Specialize decoding `Vec<u8>`. This specialization also applies to decoding `Box<[u8]>`s, etc.,
// since the default implementations call `decode` to produce a `Vec<u8>` internally. // since the default implementations call `decode` to produce a `Vec<u8>` internally.
impl<'a> serialize::Decodable<Decoder<'a>> for Vec<u8> { impl<'a> serialize::Decodable<Decoder<'a>> for Vec<u8> {
fn decode(d: &mut Decoder<'a>) -> Result<Self, String> { fn decode(d: &mut Decoder<'a>) -> Self {
let len = serialize::Decoder::read_usize(d)?; let len = serialize::Decoder::read_usize(d);
Ok(d.read_raw_bytes(len).to_owned()) d.read_raw_bytes(len).to_owned()
} }
} }
@ -752,13 +745,13 @@ impl serialize::Encodable<FileEncoder> for IntEncodedWithFixedSize {
impl<'a> serialize::Decodable<Decoder<'a>> for IntEncodedWithFixedSize { impl<'a> serialize::Decodable<Decoder<'a>> for IntEncodedWithFixedSize {
#[inline] #[inline]
fn decode(decoder: &mut Decoder<'a>) -> Result<IntEncodedWithFixedSize, String> { fn decode(decoder: &mut Decoder<'a>) -> IntEncodedWithFixedSize {
let _start_pos = decoder.position(); let _start_pos = decoder.position();
let bytes = decoder.read_raw_bytes(IntEncodedWithFixedSize::ENCODED_SIZE); let bytes = decoder.read_raw_bytes(IntEncodedWithFixedSize::ENCODED_SIZE);
let _end_pos = decoder.position(); let _end_pos = decoder.position();
debug_assert_eq!((_end_pos - _start_pos), IntEncodedWithFixedSize::ENCODED_SIZE); debug_assert_eq!((_end_pos - _start_pos), IntEncodedWithFixedSize::ENCODED_SIZE);
let value = u64::from_le_bytes(bytes.try_into().unwrap()); let value = u64::from_le_bytes(bytes.try_into().unwrap());
Ok(IntEncodedWithFixedSize(value)) IntEncodedWithFixedSize(value)
} }
} }
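Unlike the LEB128-encoded integers elsewhere, `IntEncodedWithFixedSize` always occupies exactly `ENCODED_SIZE` (8) bytes, which is what lets the dep-graph decoder earlier in this diff find its trailer counts at a fixed offset from the end of the data. The encoding this implies, sketched with a plain buffer (illustrative):

    fn encode_int_fixed_size_demo(out: &mut Vec<u8>, value: u64) {
        out.extend_from_slice(&value.to_le_bytes()); // always 8 little-endian bytes, never LEB128
    }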


@ -173,144 +173,145 @@ pub trait Encoder {
} }
} }
// Note: all the methods in this trait are infallible, which may be surprising.
// They used to be fallible (i.e. return a `Result`) but many of the impls just
// panicked when something went wrong, and for the cases that didn't the
// top-level invocation would also just panic on failure. Switching to
// infallibility made things faster and lots of code a little simpler and more
// concise.
pub trait Decoder { pub trait Decoder {
type Error;
// Primitive types: // Primitive types:
fn read_unit(&mut self) -> Result<(), Self::Error>; fn read_unit(&mut self) -> ();
fn read_usize(&mut self) -> Result<usize, Self::Error>; fn read_usize(&mut self) -> usize;
fn read_u128(&mut self) -> Result<u128, Self::Error>; fn read_u128(&mut self) -> u128;
fn read_u64(&mut self) -> Result<u64, Self::Error>; fn read_u64(&mut self) -> u64;
fn read_u32(&mut self) -> Result<u32, Self::Error>; fn read_u32(&mut self) -> u32;
fn read_u16(&mut self) -> Result<u16, Self::Error>; fn read_u16(&mut self) -> u16;
fn read_u8(&mut self) -> Result<u8, Self::Error>; fn read_u8(&mut self) -> u8;
fn read_isize(&mut self) -> Result<isize, Self::Error>; fn read_isize(&mut self) -> isize;
fn read_i128(&mut self) -> Result<i128, Self::Error>; fn read_i128(&mut self) -> i128;
fn read_i64(&mut self) -> Result<i64, Self::Error>; fn read_i64(&mut self) -> i64;
fn read_i32(&mut self) -> Result<i32, Self::Error>; fn read_i32(&mut self) -> i32;
fn read_i16(&mut self) -> Result<i16, Self::Error>; fn read_i16(&mut self) -> i16;
fn read_i8(&mut self) -> Result<i8, Self::Error>; fn read_i8(&mut self) -> i8;
fn read_bool(&mut self) -> Result<bool, Self::Error>; fn read_bool(&mut self) -> bool;
fn read_f64(&mut self) -> Result<f64, Self::Error>; fn read_f64(&mut self) -> f64;
fn read_f32(&mut self) -> Result<f32, Self::Error>; fn read_f32(&mut self) -> f32;
fn read_char(&mut self) -> Result<char, Self::Error>; fn read_char(&mut self) -> char;
fn read_str(&mut self) -> Result<Cow<'_, str>, Self::Error>; fn read_str(&mut self) -> Cow<'_, str>;
fn read_raw_bytes_into(&mut self, s: &mut [u8]) -> Result<(), Self::Error>; fn read_raw_bytes_into(&mut self, s: &mut [u8]);
// Compound types: // Compound types:
#[inline] #[inline]
fn read_enum<T, F>(&mut self, f: F) -> Result<T, Self::Error> fn read_enum<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Self) -> Result<T, Self::Error>, F: FnOnce(&mut Self) -> T,
{ {
f(self) f(self)
} }
#[inline] #[inline]
fn read_enum_variant<T, F>(&mut self, _names: &[&str], mut f: F) -> Result<T, Self::Error> fn read_enum_variant<T, F>(&mut self, _names: &[&str], mut f: F) -> T
where where
F: FnMut(&mut Self, usize) -> Result<T, Self::Error>, F: FnMut(&mut Self, usize) -> T,
{ {
let disr = self.read_usize()?; let disr = self.read_usize();
f(self, disr) f(self, disr)
} }
#[inline] #[inline]
fn read_enum_variant_arg<T, F>(&mut self, f: F) -> Result<T, Self::Error> fn read_enum_variant_arg<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Self) -> Result<T, Self::Error>, F: FnOnce(&mut Self) -> T,
{ {
f(self) f(self)
} }
#[inline] #[inline]
fn read_struct<T, F>(&mut self, f: F) -> Result<T, Self::Error> fn read_struct<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Self) -> Result<T, Self::Error>, F: FnOnce(&mut Self) -> T,
{ {
f(self) f(self)
} }
#[inline] #[inline]
fn read_struct_field<T, F>(&mut self, _f_name: &str, f: F) -> Result<T, Self::Error> fn read_struct_field<T, F>(&mut self, _f_name: &str, f: F) -> T
where where
F: FnOnce(&mut Self) -> Result<T, Self::Error>, F: FnOnce(&mut Self) -> T,
{ {
f(self) f(self)
} }
#[inline] #[inline]
fn read_tuple<T, F>(&mut self, _len: usize, f: F) -> Result<T, Self::Error> fn read_tuple<T, F>(&mut self, _len: usize, f: F) -> T
where where
F: FnOnce(&mut Self) -> Result<T, Self::Error>, F: FnOnce(&mut Self) -> T,
{ {
f(self) f(self)
} }
#[inline] #[inline]
fn read_tuple_arg<T, F>(&mut self, f: F) -> Result<T, Self::Error> fn read_tuple_arg<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Self) -> Result<T, Self::Error>, F: FnOnce(&mut Self) -> T,
{ {
f(self) f(self)
} }
// Specialized types: // Specialized types:
fn read_option<T, F>(&mut self, mut f: F) -> Result<T, Self::Error> fn read_option<T, F>(&mut self, mut f: F) -> T
where where
F: FnMut(&mut Self, bool) -> Result<T, Self::Error>, F: FnMut(&mut Self, bool) -> T,
{ {
self.read_enum(move |this| { self.read_enum(move |this| {
this.read_enum_variant(&["None", "Some"], move |this, idx| match idx { this.read_enum_variant(&["None", "Some"], move |this, idx| match idx {
0 => f(this, false), 0 => f(this, false),
1 => f(this, true), 1 => f(this, true),
_ => Err(this.error("read_option: expected 0 for None or 1 for Some")), _ => panic!("read_option: expected 0 for None or 1 for Some"),
}) })
}) })
} }
fn read_seq<T, F>(&mut self, f: F) -> Result<T, Self::Error> fn read_seq<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Self, usize) -> Result<T, Self::Error>, F: FnOnce(&mut Self, usize) -> T,
{ {
let len = self.read_usize()?; let len = self.read_usize();
f(self, len) f(self, len)
} }
#[inline] #[inline]
fn read_seq_elt<T, F>(&mut self, f: F) -> Result<T, Self::Error> fn read_seq_elt<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Self) -> Result<T, Self::Error>, F: FnOnce(&mut Self) -> T,
{ {
f(self) f(self)
} }
fn read_map<T, F>(&mut self, f: F) -> Result<T, Self::Error> fn read_map<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Self, usize) -> Result<T, Self::Error>, F: FnOnce(&mut Self, usize) -> T,
{ {
let len = self.read_usize()?; let len = self.read_usize();
f(self, len) f(self, len)
} }
#[inline] #[inline]
fn read_map_elt_key<T, F>(&mut self, f: F) -> Result<T, Self::Error> fn read_map_elt_key<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Self) -> Result<T, Self::Error>, F: FnOnce(&mut Self) -> T,
{ {
f(self) f(self)
} }
#[inline] #[inline]
fn read_map_elt_val<T, F>(&mut self, f: F) -> Result<T, Self::Error> fn read_map_elt_val<T, F>(&mut self, f: F) -> T
where where
F: FnOnce(&mut Self) -> Result<T, Self::Error>, F: FnOnce(&mut Self) -> T,
{ {
f(self) f(self)
} }
// Failure
fn error(&mut self, err: &str) -> Self::Error;
} }
/// Trait for types that can be serialized /// Trait for types that can be serialized
@ -340,7 +341,7 @@ pub trait Encodable<S: Encoder> {
/// * `TyDecodable` should be used for types that are only serialized in crate /// * `TyDecodable` should be used for types that are only serialized in crate
/// metadata or the incremental cache. This is most types in `rustc_middle`. /// metadata or the incremental cache. This is most types in `rustc_middle`.
pub trait Decodable<D: Decoder>: Sized { pub trait Decodable<D: Decoder>: Sized {
fn decode(d: &mut D) -> Result<Self, D::Error>; fn decode(d: &mut D) -> Self;
} }
macro_rules! direct_serialize_impls { macro_rules! direct_serialize_impls {
@ -353,7 +354,7 @@ macro_rules! direct_serialize_impls {
} }
impl<D: Decoder> Decodable<D> for $ty { impl<D: Decoder> Decodable<D> for $ty {
fn decode(d: &mut D) -> Result<$ty, D::Error> { fn decode(d: &mut D) -> $ty {
d.$read_method() d.$read_method()
} }
} }
@ -387,7 +388,7 @@ impl<S: Encoder> Encodable<S> for ! {
} }
impl<D: Decoder> Decodable<D> for ! { impl<D: Decoder> Decodable<D> for ! {
fn decode(_d: &mut D) -> Result<!, D::Error> { fn decode(_d: &mut D) -> ! {
unreachable!() unreachable!()
} }
} }
@ -399,8 +400,8 @@ impl<S: Encoder> Encodable<S> for ::std::num::NonZeroU32 {
} }
impl<D: Decoder> Decodable<D> for ::std::num::NonZeroU32 { impl<D: Decoder> Decodable<D> for ::std::num::NonZeroU32 {
fn decode(d: &mut D) -> Result<Self, D::Error> { fn decode(d: &mut D) -> Self {
d.read_u32().map(|d| ::std::num::NonZeroU32::new(d).unwrap()) ::std::num::NonZeroU32::new(d.read_u32()).unwrap()
} }
} }
@ -423,8 +424,8 @@ impl<S: Encoder> Encodable<S> for String {
} }
impl<D: Decoder> Decodable<D> for String { impl<D: Decoder> Decodable<D> for String {
fn decode(d: &mut D) -> Result<String, D::Error> { fn decode(d: &mut D) -> String {
Ok(d.read_str()?.into_owned()) d.read_str().into_owned()
} }
} }
@ -435,7 +436,7 @@ impl<S: Encoder> Encodable<S> for () {
} }
impl<D: Decoder> Decodable<D> for () { impl<D: Decoder> Decodable<D> for () {
fn decode(d: &mut D) -> Result<(), D::Error> { fn decode(d: &mut D) -> () {
d.read_unit() d.read_unit()
} }
} }
@ -447,16 +448,16 @@ impl<S: Encoder, T> Encodable<S> for PhantomData<T> {
} }
impl<D: Decoder, T> Decodable<D> for PhantomData<T> { impl<D: Decoder, T> Decodable<D> for PhantomData<T> {
fn decode(d: &mut D) -> Result<PhantomData<T>, D::Error> { fn decode(d: &mut D) -> PhantomData<T> {
d.read_unit()?; d.read_unit();
Ok(PhantomData) PhantomData
} }
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for Box<[T]> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for Box<[T]> {
fn decode(d: &mut D) -> Result<Box<[T]>, D::Error> { fn decode(d: &mut D) -> Box<[T]> {
let v: Vec<T> = Decodable::decode(d)?; let v: Vec<T> = Decodable::decode(d);
Ok(v.into_boxed_slice()) v.into_boxed_slice()
} }
} }
@ -467,8 +468,8 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for Rc<T> {
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for Rc<T> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for Rc<T> {
fn decode(d: &mut D) -> Result<Rc<T>, D::Error> { fn decode(d: &mut D) -> Rc<T> {
Ok(Rc::new(Decodable::decode(d)?)) Rc::new(Decodable::decode(d))
} }
} }
@ -491,13 +492,22 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for Vec<T> {
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for Vec<T> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for Vec<T> {
default fn decode(d: &mut D) -> Result<Vec<T>, D::Error> { default fn decode(d: &mut D) -> Vec<T> {
d.read_seq(|d, len| { d.read_seq(|d, len| {
let mut v = Vec::with_capacity(len); // SAFETY: we set the capacity in advance, only write elements, and
for _ in 0..len { // only set the length at the end once the writing has succeeded.
v.push(d.read_seq_elt(|d| Decodable::decode(d))?); let mut vec = Vec::with_capacity(len);
unsafe {
let ptr: *mut T = vec.as_mut_ptr();
for i in 0..len {
std::ptr::write(
ptr.offset(i as isize),
d.read_seq_elt(|d| Decodable::decode(d)),
);
}
vec.set_len(len);
} }
Ok(v) vec
}) })
} }
} }
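For comparison, the safe version of this specialization is the same `collect`-based pattern used for `SmallVec` earlier; the patch keeps the unsafe variant because it benchmarked faster on some workloads. A sketch of the safe equivalent (illustrative only):

    // Safe equivalent of the Vec specialization above.
    fn decode_vec_safe<D: Decoder, T: Decodable<D>>(d: &mut D) -> Vec<T> {
        d.read_seq(|d, len| (0..len).map(|_| d.read_seq_elt(|d| Decodable::decode(d))).collect())
    }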
@ -510,14 +520,14 @@ impl<S: Encoder, T: Encodable<S>, const N: usize> Encodable<S> for [T; N] {
} }
impl<D: Decoder, const N: usize> Decodable<D> for [u8; N] { impl<D: Decoder, const N: usize> Decodable<D> for [u8; N] {
fn decode(d: &mut D) -> Result<[u8; N], D::Error> { fn decode(d: &mut D) -> [u8; N] {
d.read_seq(|d, len| { d.read_seq(|d, len| {
assert!(len == N); assert!(len == N);
let mut v = [0u8; N]; let mut v = [0u8; N];
for i in 0..len { for i in 0..len {
v[i] = d.read_seq_elt(|d| Decodable::decode(d))?; v[i] = d.read_seq_elt(|d| Decodable::decode(d));
} }
Ok(v) v
}) })
} }
} }
@ -536,9 +546,9 @@ impl<D: Decoder, T: Decodable<D> + ToOwned> Decodable<D> for Cow<'static, [T]>
where where
[T]: ToOwned<Owned = Vec<T>>, [T]: ToOwned<Owned = Vec<T>>,
{ {
fn decode(d: &mut D) -> Result<Cow<'static, [T]>, D::Error> { fn decode(d: &mut D) -> Cow<'static, [T]> {
let v: Vec<T> = Decodable::decode(d)?; let v: Vec<T> = Decodable::decode(d);
Ok(Cow::Owned(v)) Cow::Owned(v)
} }
} }
@ -552,8 +562,8 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for Option<T> {
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for Option<T> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for Option<T> {
fn decode(d: &mut D) -> Result<Option<T>, D::Error> { fn decode(d: &mut D) -> Option<T> {
d.read_option(|d, b| if b { Ok(Some(Decodable::decode(d)?)) } else { Ok(None) }) d.read_option(|d, b| if b { Some(Decodable::decode(d)) } else { None })
} }
} }
@ -571,17 +581,12 @@ impl<S: Encoder, T1: Encodable<S>, T2: Encodable<S>> Encodable<S> for Result<T1,
} }
impl<D: Decoder, T1: Decodable<D>, T2: Decodable<D>> Decodable<D> for Result<T1, T2> { impl<D: Decoder, T1: Decodable<D>, T2: Decodable<D>> Decodable<D> for Result<T1, T2> {
fn decode(d: &mut D) -> Result<Result<T1, T2>, D::Error> { fn decode(d: &mut D) -> Result<T1, T2> {
d.read_enum(|d| { d.read_enum(|d| {
d.read_enum_variant(&["Ok", "Err"], |d, disr| match disr { d.read_enum_variant(&["Ok", "Err"], |d, disr| match disr {
0 => Ok(Ok(d.read_enum_variant_arg(|d| T1::decode(d))?)), 0 => Ok(d.read_enum_variant_arg(|d| T1::decode(d))),
1 => Ok(Err(d.read_enum_variant_arg(|d| T2::decode(d))?)), 1 => Err(d.read_enum_variant_arg(|d| T2::decode(d))),
_ => { _ => panic!("Encountered invalid discriminant while decoding `Result`."),
panic!(
"Encountered invalid discriminant while \
decoding `Result`."
);
}
}) })
}) })
} }
@ -609,13 +614,13 @@ macro_rules! tuple {
( $($name:ident,)+ ) => ( ( $($name:ident,)+ ) => (
impl<D: Decoder, $($name: Decodable<D>),+> Decodable<D> for ($($name,)+) { impl<D: Decoder, $($name: Decodable<D>),+> Decodable<D> for ($($name,)+) {
#[allow(non_snake_case)] #[allow(non_snake_case)]
fn decode(d: &mut D) -> Result<($($name,)+), D::Error> { fn decode(d: &mut D) -> ($($name,)+) {
let len: usize = count!($($name)+); let len: usize = count!($($name)+);
d.read_tuple(len, |d| { d.read_tuple(len, |d| {
let ret = ($(d.read_tuple_arg(|d| -> Result<$name, D::Error> { let ret = ($(d.read_tuple_arg(|d| -> $name {
Decodable::decode(d) Decodable::decode(d)
})?,)+); }),)+);
Ok(ret) ret
}) })
} }
} }
@ -651,9 +656,9 @@ impl<S: Encoder> Encodable<S> for path::PathBuf {
} }
impl<D: Decoder> Decodable<D> for path::PathBuf { impl<D: Decoder> Decodable<D> for path::PathBuf {
fn decode(d: &mut D) -> Result<path::PathBuf, D::Error> { fn decode(d: &mut D) -> path::PathBuf {
let bytes: String = Decodable::decode(d)?; let bytes: String = Decodable::decode(d);
Ok(path::PathBuf::from(bytes)) path::PathBuf::from(bytes)
} }
} }
@ -664,8 +669,8 @@ impl<S: Encoder, T: Encodable<S> + Copy> Encodable<S> for Cell<T> {
} }
impl<D: Decoder, T: Decodable<D> + Copy> Decodable<D> for Cell<T> { impl<D: Decoder, T: Decodable<D> + Copy> Decodable<D> for Cell<T> {
fn decode(d: &mut D) -> Result<Cell<T>, D::Error> { fn decode(d: &mut D) -> Cell<T> {
Ok(Cell::new(Decodable::decode(d)?)) Cell::new(Decodable::decode(d))
} }
} }
@ -681,8 +686,8 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for RefCell<T> {
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for RefCell<T> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for RefCell<T> {
fn decode(d: &mut D) -> Result<RefCell<T>, D::Error> { fn decode(d: &mut D) -> RefCell<T> {
Ok(RefCell::new(Decodable::decode(d)?)) RefCell::new(Decodable::decode(d))
} }
} }
@ -693,8 +698,8 @@ impl<S: Encoder, T: Encodable<S>> Encodable<S> for Arc<T> {
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for Arc<T> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for Arc<T> {
fn decode(d: &mut D) -> Result<Arc<T>, D::Error> { fn decode(d: &mut D) -> Arc<T> {
Ok(Arc::new(Decodable::decode(d)?)) Arc::new(Decodable::decode(d))
} }
} }
@ -704,7 +709,7 @@ impl<S: Encoder, T: ?Sized + Encodable<S>> Encodable<S> for Box<T> {
} }
} }
impl<D: Decoder, T: Decodable<D>> Decodable<D> for Box<T> { impl<D: Decoder, T: Decodable<D>> Decodable<D> for Box<T> {
fn decode(d: &mut D) -> Result<Box<T>, D::Error> { fn decode(d: &mut D) -> Box<T> {
Ok(Box::new(Decodable::decode(d)?)) Box::new(Decodable::decode(d))
} }
} }


@ -1,14 +1,10 @@
#![allow(rustc::internal)] #![allow(rustc::internal)]
use json::DecoderError::*;
use json::ErrorCode::*; use json::ErrorCode::*;
use json::Json::*; use json::Json::*;
use json::JsonEvent::*; use json::JsonEvent::*;
use json::ParserError::*; use json::ParserError::*;
use json::{ use json::{from_str, Decoder, Encoder, EncoderError, Json, JsonEvent, Parser, StackElement};
from_str, DecodeResult, Decoder, DecoderError, Encoder, EncoderError, Json, JsonEvent, Parser,
StackElement,
};
use rustc_macros::{Decodable, Encodable}; use rustc_macros::{Decodable, Encodable};
use rustc_serialize::json; use rustc_serialize::json;
use rustc_serialize::{Decodable, Encodable}; use rustc_serialize::{Decodable, Encodable};
@ -26,27 +22,27 @@ struct OptionData {
#[test] #[test]
fn test_decode_option_none() { fn test_decode_option_none() {
let s = "{}"; let s = "{}";
let obj: OptionData = json::decode(s).unwrap(); let obj: OptionData = json::decode(s);
assert_eq!(obj, OptionData { opt: None }); assert_eq!(obj, OptionData { opt: None });
} }
#[test] #[test]
fn test_decode_option_some() { fn test_decode_option_some() {
let s = "{ \"opt\": 10 }"; let s = "{ \"opt\": 10 }";
let obj: OptionData = json::decode(s).unwrap(); let obj: OptionData = json::decode(s);
assert_eq!(obj, OptionData { opt: Some(10) }); assert_eq!(obj, OptionData { opt: Some(10) });
} }
#[test] #[test]
fn test_decode_option_malformed() { #[should_panic(expected = r#"ExpectedError("Number", "[]")"#)]
check_err::<OptionData>( fn test_decode_option_malformed1() {
"{ \"opt\": [] }", check_err::<OptionData>(r#"{ "opt": [] }"#);
ExpectedError("Number".to_string(), "[]".to_string()), }
);
check_err::<OptionData>( #[test]
"{ \"opt\": false }", #[should_panic(expected = r#"ExpectedError("Number", "false")"#)]
ExpectedError("Number".to_string(), "false".to_string()), fn test_decode_option_malformed2() {
); check_err::<OptionData>(r#"{ "opt": false }"#);
} }
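Note that `expected = "..."` only has to match a substring of the panic message, so the `Debug` rendering of the decoder error is enough to pin down the failure. Where the exact message isn't worth pinning down, a bare `#[should_panic]` also works; a hypothetical extra case in the same style (not part of the diff):

#[test]
#[should_panic]
fn test_decode_option_malformed3() {
    check_err::<OptionData>(r#"{ "opt": "ten" }"#);
}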
#[derive(PartialEq, Encodable, Decodable, Debug)] #[derive(PartialEq, Encodable, Decodable, Debug)]
@ -329,13 +325,13 @@ fn test_read_identifiers() {
#[test] #[test]
fn test_decode_identifiers() { fn test_decode_identifiers() {
let v: () = json::decode("null").unwrap(); let v: () = json::decode("null");
assert_eq!(v, ()); assert_eq!(v, ());
let v: bool = json::decode("true").unwrap(); let v: bool = json::decode("true");
assert_eq!(v, true); assert_eq!(v, true);
let v: bool = json::decode("false").unwrap(); let v: bool = json::decode("false");
assert_eq!(v, false); assert_eq!(v, false);
} }
@ -368,42 +364,42 @@ fn test_read_number() {
} }
#[test] #[test]
#[should_panic(expected = r#"ExpectedError("Integer", "765.25")"#)]
fn test_decode_numbers() { fn test_decode_numbers() {
let v: f64 = json::decode("3").unwrap(); let v: f64 = json::decode("3");
assert_eq!(v, 3.0); assert_eq!(v, 3.0);
let v: f64 = json::decode("3.1").unwrap(); let v: f64 = json::decode("3.1");
assert_eq!(v, 3.1); assert_eq!(v, 3.1);
let v: f64 = json::decode("-1.2").unwrap(); let v: f64 = json::decode("-1.2");
assert_eq!(v, -1.2); assert_eq!(v, -1.2);
let v: f64 = json::decode("0.4").unwrap(); let v: f64 = json::decode("0.4");
assert_eq!(v, 0.4); assert_eq!(v, 0.4);
let v: f64 = json::decode("0.4e5").unwrap(); let v: f64 = json::decode("0.4e5");
assert_eq!(v, 0.4e5); assert_eq!(v, 0.4e5);
let v: f64 = json::decode("0.4e15").unwrap(); let v: f64 = json::decode("0.4e15");
assert_eq!(v, 0.4e15); assert_eq!(v, 0.4e15);
let v: f64 = json::decode("0.4e-01").unwrap(); let v: f64 = json::decode("0.4e-01");
assert_eq!(v, 0.4e-01); assert_eq!(v, 0.4e-01);
let v: u64 = json::decode("0").unwrap(); let v: u64 = json::decode("0");
assert_eq!(v, 0); assert_eq!(v, 0);
let v: u64 = json::decode("18446744073709551615").unwrap(); let v: u64 = json::decode("18446744073709551615");
assert_eq!(v, u64::MAX); assert_eq!(v, u64::MAX);
let v: i64 = json::decode("-9223372036854775808").unwrap(); let v: i64 = json::decode("-9223372036854775808");
assert_eq!(v, i64::MIN); assert_eq!(v, i64::MIN);
let v: i64 = json::decode("9223372036854775807").unwrap(); let v: i64 = json::decode("9223372036854775807");
assert_eq!(v, i64::MAX); assert_eq!(v, i64::MAX);
let res: DecodeResult<i64> = json::decode("765.25"); json::decode::<i64>("765.25");
assert_eq!(res, Err(ExpectedError("Integer".to_string(), "765.25".to_string())));
} }
#[test] #[test]
@ -438,7 +434,7 @@ fn test_decode_str() {
]; ];
for (i, o) in s { for (i, o) in s {
let v: string::String = json::decode(i).unwrap(); let v: string::String = json::decode(i);
assert_eq!(v, o); assert_eq!(v, o);
} }
} }
@ -463,39 +459,41 @@ fn test_read_array() {
#[test] #[test]
fn test_decode_array() { fn test_decode_array() {
let v: Vec<()> = json::decode("[]").unwrap(); let v: Vec<()> = json::decode("[]");
assert_eq!(v, []); assert_eq!(v, []);
let v: Vec<()> = json::decode("[null]").unwrap(); let v: Vec<()> = json::decode("[null]");
assert_eq!(v, [()]); assert_eq!(v, [()]);
let v: Vec<bool> = json::decode("[true]").unwrap(); let v: Vec<bool> = json::decode("[true]");
assert_eq!(v, [true]); assert_eq!(v, [true]);
let v: Vec<isize> = json::decode("[3, 1]").unwrap(); let v: Vec<isize> = json::decode("[3, 1]");
assert_eq!(v, [3, 1]); assert_eq!(v, [3, 1]);
let v: Vec<Vec<usize>> = json::decode("[[3], [1, 2]]").unwrap(); let v: Vec<Vec<usize>> = json::decode("[[3], [1, 2]]");
assert_eq!(v, [vec![3], vec![1, 2]]); assert_eq!(v, [vec![3], vec![1, 2]]);
} }
#[test] #[test]
fn test_decode_tuple() { fn test_decode_tuple() {
let t: (usize, usize, usize) = json::decode("[1, 2, 3]").unwrap(); let t: (usize, usize, usize) = json::decode("[1, 2, 3]");
assert_eq!(t, (1, 2, 3)); assert_eq!(t, (1, 2, 3));
let t: (usize, string::String) = json::decode("[1, \"two\"]").unwrap(); let t: (usize, string::String) = json::decode("[1, \"two\"]");
assert_eq!(t, (1, "two".to_string())); assert_eq!(t, (1, "two".to_string()));
} }
#[test] #[test]
#[should_panic]
fn test_decode_tuple_malformed_types() { fn test_decode_tuple_malformed_types() {
assert!(json::decode::<(usize, string::String)>("[1, 2]").is_err()); json::decode::<(usize, string::String)>("[1, 2]");
} }
#[test] #[test]
#[should_panic]
fn test_decode_tuple_malformed_length() { fn test_decode_tuple_malformed_length() {
assert!(json::decode::<(usize, usize)>("[1, 2, 3]").is_err()); json::decode::<(usize, usize)>("[1, 2, 3]");
} }
#[test] #[test]
@ -562,7 +560,7 @@ fn test_decode_struct() {
] ]
}"; }";
let v: Outer = json::decode(s).unwrap(); let v: Outer = json::decode(s);
assert_eq!( assert_eq!(
v, v,
Outer { inner: vec![Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }] } Outer { inner: vec![Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }] }
@ -577,7 +575,7 @@ struct FloatStruct {
#[test] #[test]
fn test_decode_struct_with_nan() { fn test_decode_struct_with_nan() {
let s = "{\"f\":null,\"a\":[null,123]}"; let s = "{\"f\":null,\"a\":[null,123]}";
let obj: FloatStruct = json::decode(s).unwrap(); let obj: FloatStruct = json::decode(s);
assert!(obj.f.is_nan()); assert!(obj.f.is_nan());
assert!(obj.a[0].is_nan()); assert!(obj.a[0].is_nan());
assert_eq!(obj.a[1], 123f64); assert_eq!(obj.a[1], 123f64);
@ -585,20 +583,20 @@ fn test_decode_struct_with_nan() {
#[test] #[test]
fn test_decode_option() { fn test_decode_option() {
let value: Option<string::String> = json::decode("null").unwrap(); let value: Option<string::String> = json::decode("null");
assert_eq!(value, None); assert_eq!(value, None);
let value: Option<string::String> = json::decode("\"jodhpurs\"").unwrap(); let value: Option<string::String> = json::decode("\"jodhpurs\"");
assert_eq!(value, Some("jodhpurs".to_string())); assert_eq!(value, Some("jodhpurs".to_string()));
} }
#[test] #[test]
fn test_decode_enum() { fn test_decode_enum() {
let value: Animal = json::decode("\"Dog\"").unwrap(); let value: Animal = json::decode("\"Dog\"");
assert_eq!(value, Dog); assert_eq!(value, Dog);
let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"; let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}";
let value: Animal = json::decode(s).unwrap(); let value: Animal = json::decode(s);
assert_eq!(value, Frog("Henry".to_string(), 349)); assert_eq!(value, Frog("Henry".to_string(), 349));
} }
@ -606,7 +604,7 @@ fn test_decode_enum() {
fn test_decode_map() { fn test_decode_map() {
let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\ let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\
\"fields\":[\"Henry\", 349]}}"; \"fields\":[\"Henry\", 349]}}";
let mut map: BTreeMap<string::String, Animal> = json::decode(s).unwrap(); let mut map: BTreeMap<string::String, Animal> = json::decode(s);
assert_eq!(map.remove(&"a".to_string()), Some(Dog)); assert_eq!(map.remove(&"a".to_string()), Some(Dog));
assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349))); assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349)));
@ -630,59 +628,65 @@ enum DecodeEnum {
A(f64), A(f64),
B(string::String), B(string::String),
} }
fn check_err<T: Decodable<Decoder>>(to_parse: &'static str, expected: DecoderError) { fn check_err<T: Decodable<Decoder>>(to_parse: &str) {
let res: DecodeResult<T> = match from_str(to_parse) { let json = from_str(to_parse).unwrap();
Err(e) => Err(ParseError(e)), let _: T = Decodable::decode(&mut Decoder::new(json));
Ok(json) => Decodable::decode(&mut Decoder::new(json)),
};
match res {
Ok(_) => panic!("`{:?}` parsed & decoded ok, expecting error `{:?}`", to_parse, expected),
Err(ParseError(e)) => panic!("`{:?}` is not valid json: {:?}", to_parse, e),
Err(e) => {
assert_eq!(e, expected);
}
}
} }
#[test] #[test]
fn test_decode_errors_struct() { #[should_panic(expected = r#"ExpectedError("Object", "[]")"#)]
check_err::<DecodeStruct>("[]", ExpectedError("Object".to_string(), "[]".to_string())); fn test_decode_errors_struct1() {
check_err::<DecodeStruct>( check_err::<DecodeStruct>("[]");
"{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}",
ExpectedError("Number".to_string(), "true".to_string()),
);
check_err::<DecodeStruct>(
"{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}",
ExpectedError("Boolean".to_string(), "[]".to_string()),
);
check_err::<DecodeStruct>(
"{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}",
ExpectedError("String".to_string(), "{}".to_string()),
);
check_err::<DecodeStruct>(
"{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}",
ExpectedError("Array".to_string(), "null".to_string()),
);
check_err::<DecodeStruct>(
"{\"x\": 1, \"y\": true, \"z\": \"\"}",
MissingFieldError("w".to_string()),
);
} }
#[test] #[test]
fn test_decode_errors_enum() { #[should_panic(expected = r#"ExpectedError("Number", "true")"#)]
check_err::<DecodeEnum>("{}", MissingFieldError("variant".to_string())); fn test_decode_errors_struct2() {
check_err::<DecodeEnum>( check_err::<DecodeStruct>(r#"{"x": true, "y": true, "z": "", "w": []}"#);
"{\"variant\": 1}", }
ExpectedError("String".to_string(), "1".to_string()), #[test]
); #[should_panic(expected = r#"ExpectedError("Boolean", "[]")"#)]
check_err::<DecodeEnum>("{\"variant\": \"A\"}", MissingFieldError("fields".to_string())); fn test_decode_errors_struct3() {
check_err::<DecodeEnum>( check_err::<DecodeStruct>(r#"{"x": 1, "y": [], "z": "", "w": []}"#);
"{\"variant\": \"A\", \"fields\": null}", }
ExpectedError("Array".to_string(), "null".to_string()), #[test]
); #[should_panic(expected = r#"ExpectedError("String", "{}")"#)]
check_err::<DecodeEnum>( fn test_decode_errors_struct4() {
"{\"variant\": \"C\", \"fields\": []}", check_err::<DecodeStruct>(r#"{"x": 1, "y": true, "z": {}, "w": []}"#);
UnknownVariantError("C".to_string()), }
); #[test]
#[should_panic(expected = r#"ExpectedError("Array", "null")"#)]
fn test_decode_errors_struct5() {
check_err::<DecodeStruct>(r#"{"x": 1, "y": true, "z": "", "w": null}"#);
}
#[test]
#[should_panic(expected = r#"ExpectedError("Array", "null")"#)]
fn test_decode_errors_struct6() {
check_err::<DecodeStruct>(r#"{"x": 1, "y": true, "z": ""}"#);
}
#[test]
#[should_panic(expected = r#"MissingFieldError("variant")"#)]
fn test_decode_errors_enum1() {
check_err::<DecodeEnum>(r#"{}"#);
}
#[test]
#[should_panic(expected = r#"ExpectedError("String", "1")"#)]
fn test_decode_errors_enum2() {
check_err::<DecodeEnum>(r#"{"variant": 1}"#);
}
#[test]
#[should_panic(expected = r#"MissingFieldError("fields")"#)]
fn test_decode_errors_enum3() {
check_err::<DecodeEnum>(r#"{"variant": "A"}"#);
}
#[test]
#[should_panic(expected = r#"ExpectedError("Array", "null")"#)]
fn test_decode_errors_enum4() {
check_err::<DecodeEnum>(r#"{"variant": "A", "fields": null}"#);
}
#[test]
#[should_panic(expected = r#"UnknownVariantError("C")"#)]
fn test_decode_errors_enum5() {
check_err::<DecodeEnum>(r#"{"variant": "C", "fields": []}"#);
} }
#[test] #[test]
@ -944,7 +948,7 @@ fn test_hashmap_with_enum_key() {
map.insert(Enum::Foo, 0); map.insert(Enum::Foo, 0);
let result = json::encode(&map).unwrap(); let result = json::encode(&map).unwrap();
assert_eq!(&result[..], r#"{"Foo":0}"#); assert_eq!(&result[..], r#"{"Foo":0}"#);
let decoded: HashMap<Enum, _> = json::decode(&result).unwrap(); let decoded: HashMap<Enum, _> = json::decode(&result);
assert_eq!(map, decoded); assert_eq!(map, decoded);
} }
@ -957,10 +961,11 @@ fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() {
Ok(o) => o, Ok(o) => o,
}; };
let mut decoder = Decoder::new(json_obj); let mut decoder = Decoder::new(json_obj);
let _hm: HashMap<usize, bool> = Decodable::decode(&mut decoder).unwrap(); let _hm: HashMap<usize, bool> = Decodable::decode(&mut decoder);
} }
#[test] #[test]
#[should_panic(expected = r#"ExpectedError("Number", "a")"#)]
fn test_hashmap_with_numeric_key_will_error_with_string_keys() { fn test_hashmap_with_numeric_key_will_error_with_string_keys() {
use std::collections::HashMap; use std::collections::HashMap;
let json_str = "{\"a\":true}"; let json_str = "{\"a\":true}";
@ -969,8 +974,7 @@ fn test_hashmap_with_numeric_key_will_error_with_string_keys() {
Ok(o) => o, Ok(o) => o,
}; };
let mut decoder = Decoder::new(json_obj); let mut decoder = Decoder::new(json_obj);
let result: Result<HashMap<usize, bool>, DecoderError> = Decodable::decode(&mut decoder); let _: HashMap<usize, bool> = Decodable::decode(&mut decoder);
assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string())));
} }
fn assert_stream_equal(src: &str, expected: Vec<(JsonEvent, Vec<StackElement<'_>>)>) { fn assert_stream_equal(src: &str, expected: Vec<(JsonEvent, Vec<StackElement<'_>>)>) {

View file

@ -41,7 +41,7 @@ fn check_round_trip<T: Encodable<Encoder> + for<'a> Decodable<Decoder<'a>> + Par
let mut decoder = Decoder::new(&data[..], 0); let mut decoder = Decoder::new(&data[..], 0);
for value in values { for value in values {
let decoded = Decodable::decode(&mut decoder).unwrap(); let decoded = Decodable::decode(&mut decoder);
assert_eq!(value, decoded); assert_eq!(value, decoded);
} }
} }

View file

@ -47,8 +47,8 @@ impl<E: Encoder> Encodable<E> for CrateNum {
} }
impl<D: Decoder> Decodable<D> for CrateNum { impl<D: Decoder> Decodable<D> for CrateNum {
default fn decode(d: &mut D) -> Result<CrateNum, D::Error> { default fn decode(d: &mut D) -> CrateNum {
Ok(CrateNum::from_u32(d.read_u32()?)) CrateNum::from_u32(d.read_u32())
} }
} }
@ -209,7 +209,7 @@ impl<E: Encoder> Encodable<E> for DefIndex {
} }
impl<D: Decoder> Decodable<D> for DefIndex { impl<D: Decoder> Decodable<D> for DefIndex {
default fn decode(_: &mut D) -> Result<DefIndex, D::Error> { default fn decode(_: &mut D) -> DefIndex {
panic!("cannot decode `DefIndex` with `{}`", std::any::type_name::<D>()); panic!("cannot decode `DefIndex` with `{}`", std::any::type_name::<D>());
} }
} }
@ -298,12 +298,10 @@ impl<E: Encoder> Encodable<E> for DefId {
} }
impl<D: Decoder> Decodable<D> for DefId { impl<D: Decoder> Decodable<D> for DefId {
default fn decode(d: &mut D) -> Result<DefId, D::Error> { default fn decode(d: &mut D) -> DefId {
d.read_struct(|d| { d.read_struct(|d| DefId {
Ok(DefId { krate: d.read_struct_field("krate", Decodable::decode),
krate: d.read_struct_field("krate", Decodable::decode)?, index: d.read_struct_field("index", Decodable::decode),
index: d.read_struct_field("index", Decodable::decode)?,
})
}) })
} }
} }
@ -378,8 +376,8 @@ impl<E: Encoder> Encodable<E> for LocalDefId {
} }
impl<D: Decoder> Decodable<D> for LocalDefId { impl<D: Decoder> Decodable<D> for LocalDefId {
fn decode(d: &mut D) -> Result<LocalDefId, D::Error> { fn decode(d: &mut D) -> LocalDefId {
DefId::decode(d).map(|d| d.expect_local()) DefId::decode(d).expect_local()
} }
} }
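The `default fn decode` impls above lean on specialization: a concrete decoder that does know how to produce these values overrides the panicking default. A hedged sketch, with `MyMetadataDecoder` standing in for such a decoder (the name and its behaviour are illustrative):

impl Decodable<MyMetadataDecoder> for DefIndex {
    fn decode(d: &mut MyMetadataDecoder) -> DefIndex {
        // A real impl would remap the on-disk index; reading a u32 is just a placeholder.
        DefIndex::from_u32(d.read_u32())
    }
}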

View file

@ -1314,19 +1314,16 @@ pub fn decode_expn_id(
// to track which `SyntaxContext`s we have already decoded. // to track which `SyntaxContext`s we have already decoded.
// The provided closure will be invoked to deserialize a `SyntaxContextData` // The provided closure will be invoked to deserialize a `SyntaxContextData`
// if we haven't already seen the id of the `SyntaxContext` we are deserializing. // if we haven't already seen the id of the `SyntaxContext` we are deserializing.
pub fn decode_syntax_context< pub fn decode_syntax_context<D: Decoder, F: FnOnce(&mut D, u32) -> SyntaxContextData>(
D: Decoder,
F: FnOnce(&mut D, u32) -> Result<SyntaxContextData, D::Error>,
>(
d: &mut D, d: &mut D,
context: &HygieneDecodeContext, context: &HygieneDecodeContext,
decode_data: F, decode_data: F,
) -> Result<SyntaxContext, D::Error> { ) -> SyntaxContext {
let raw_id: u32 = Decodable::decode(d)?; let raw_id: u32 = Decodable::decode(d);
if raw_id == 0 { if raw_id == 0 {
debug!("decode_syntax_context: deserialized root"); debug!("decode_syntax_context: deserialized root");
// The root is special // The root is special
return Ok(SyntaxContext::root()); return SyntaxContext::root();
} }
let outer_ctxts = &context.remapped_ctxts; let outer_ctxts = &context.remapped_ctxts;
@ -1334,7 +1331,7 @@ pub fn decode_syntax_context<
// Ensure that the lock() temporary is dropped early // Ensure that the lock() temporary is dropped early
{ {
if let Some(ctxt) = outer_ctxts.lock().get(raw_id as usize).copied().flatten() { if let Some(ctxt) = outer_ctxts.lock().get(raw_id as usize).copied().flatten() {
return Ok(ctxt); return ctxt;
} }
} }
@ -1364,7 +1361,7 @@ pub fn decode_syntax_context<
// Don't try to decode data while holding the lock, since we need to // Don't try to decode data while holding the lock, since we need to
// be able to recursively decode a SyntaxContext // be able to recursively decode a SyntaxContext
let mut ctxt_data = decode_data(d, raw_id)?; let mut ctxt_data = decode_data(d, raw_id);
// Reset `dollar_crate_name` so that it will be updated by `update_dollar_crate_names` // Reset `dollar_crate_name` so that it will be updated by `update_dollar_crate_names`
// We don't care what the encoding crate set this to - we want to resolve it // We don't care what the encoding crate set this to - we want to resolve it
// from the perspective of the current compilation session // from the perspective of the current compilation session
@ -1380,7 +1377,7 @@ pub fn decode_syntax_context<
assert_eq!(dummy.dollar_crate_name, kw::Empty); assert_eq!(dummy.dollar_crate_name, kw::Empty);
}); });
Ok(new_ctxt) new_ctxt
} }
fn for_all_ctxts_in<E, F: FnMut(u32, SyntaxContext, &SyntaxContextData) -> Result<(), E>>( fn for_all_ctxts_in<E, F: FnMut(u32, SyntaxContext, &SyntaxContextData) -> Result<(), E>>(
@ -1422,13 +1419,13 @@ impl<E: Encoder> Encodable<E> for ExpnId {
} }
impl<D: Decoder> Decodable<D> for LocalExpnId { impl<D: Decoder> Decodable<D> for LocalExpnId {
fn decode(d: &mut D) -> Result<Self, D::Error> { fn decode(d: &mut D) -> Self {
ExpnId::decode(d).map(ExpnId::expect_local) ExpnId::expect_local(ExpnId::decode(d))
} }
} }
impl<D: Decoder> Decodable<D> for ExpnId { impl<D: Decoder> Decodable<D> for ExpnId {
default fn decode(_: &mut D) -> Result<Self, D::Error> { default fn decode(_: &mut D) -> Self {
panic!("cannot decode `ExpnId` with `{}`", std::any::type_name::<D>()); panic!("cannot decode `ExpnId` with `{}`", std::any::type_name::<D>());
} }
} }
@ -1451,7 +1448,7 @@ impl<E: Encoder> Encodable<E> for SyntaxContext {
} }
impl<D: Decoder> Decodable<D> for SyntaxContext { impl<D: Decoder> Decodable<D> for SyntaxContext {
default fn decode(_: &mut D) -> Result<Self, D::Error> { default fn decode(_: &mut D) -> Self {
panic!("cannot decode `SyntaxContext` with `{}`", std::any::type_name::<D>()); panic!("cannot decode `SyntaxContext` with `{}`", std::any::type_name::<D>());
} }
} }
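Caller-side sketch of the new `decode_syntax_context` shape: the `decode_data` closure now returns `SyntaxContextData` directly instead of a `Result`, so the call site loses its `?` (assuming `SyntaxContextData: Decodable<D>`; the decoder `d` and `hygiene_context` are illustrative):

let ctxt = decode_syntax_context(d, &hygiene_context, |d, _raw_id| -> SyntaxContextData {
    Decodable::decode(d)
});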

View file

@ -975,12 +975,12 @@ impl<E: Encoder> Encodable<E> for Span {
} }
} }
impl<D: Decoder> Decodable<D> for Span { impl<D: Decoder> Decodable<D> for Span {
default fn decode(s: &mut D) -> Result<Span, D::Error> { default fn decode(s: &mut D) -> Span {
s.read_struct(|d| { s.read_struct(|d| {
let lo = d.read_struct_field("lo", Decodable::decode)?; let lo = d.read_struct_field("lo", Decodable::decode);
let hi = d.read_struct_field("hi", Decodable::decode)?; let hi = d.read_struct_field("hi", Decodable::decode);
Ok(Span::new(lo, hi, SyntaxContext::root(), None)) Span::new(lo, hi, SyntaxContext::root(), None)
}) })
} }
} }
@ -1448,30 +1448,30 @@ impl<S: Encoder> Encodable<S> for SourceFile {
} }
impl<D: Decoder> Decodable<D> for SourceFile { impl<D: Decoder> Decodable<D> for SourceFile {
fn decode(d: &mut D) -> Result<SourceFile, D::Error> { fn decode(d: &mut D) -> SourceFile {
d.read_struct(|d| { d.read_struct(|d| {
let name: FileName = d.read_struct_field("name", |d| Decodable::decode(d))?; let name: FileName = d.read_struct_field("name", |d| Decodable::decode(d));
let src_hash: SourceFileHash = let src_hash: SourceFileHash =
d.read_struct_field("src_hash", |d| Decodable::decode(d))?; d.read_struct_field("src_hash", |d| Decodable::decode(d));
let start_pos: BytePos = d.read_struct_field("start_pos", |d| Decodable::decode(d))?; let start_pos: BytePos = d.read_struct_field("start_pos", |d| Decodable::decode(d));
let end_pos: BytePos = d.read_struct_field("end_pos", |d| Decodable::decode(d))?; let end_pos: BytePos = d.read_struct_field("end_pos", |d| Decodable::decode(d));
let lines: Vec<BytePos> = d.read_struct_field("lines", |d| { let lines: Vec<BytePos> = d.read_struct_field("lines", |d| {
let num_lines: u32 = Decodable::decode(d)?; let num_lines: u32 = Decodable::decode(d);
let mut lines = Vec::with_capacity(num_lines as usize); let mut lines = Vec::with_capacity(num_lines as usize);
if num_lines > 0 { if num_lines > 0 {
// Read the number of bytes used per diff. // Read the number of bytes used per diff.
let bytes_per_diff: u8 = Decodable::decode(d)?; let bytes_per_diff: u8 = Decodable::decode(d);
// Read the first element. // Read the first element.
let mut line_start: BytePos = Decodable::decode(d)?; let mut line_start: BytePos = Decodable::decode(d);
lines.push(line_start); lines.push(line_start);
for _ in 1..num_lines { for _ in 1..num_lines {
let diff = match bytes_per_diff { let diff = match bytes_per_diff {
1 => d.read_u8()? as u32, 1 => d.read_u8() as u32,
2 => d.read_u16()? as u32, 2 => d.read_u16() as u32,
4 => d.read_u32()?, 4 => d.read_u32(),
_ => unreachable!(), _ => unreachable!(),
}; };
@ -1481,17 +1481,17 @@ impl<D: Decoder> Decodable<D> for SourceFile {
} }
} }
Ok(lines) lines
})?; });
let multibyte_chars: Vec<MultiByteChar> = let multibyte_chars: Vec<MultiByteChar> =
d.read_struct_field("multibyte_chars", |d| Decodable::decode(d))?; d.read_struct_field("multibyte_chars", |d| Decodable::decode(d));
let non_narrow_chars: Vec<NonNarrowChar> = let non_narrow_chars: Vec<NonNarrowChar> =
d.read_struct_field("non_narrow_chars", |d| Decodable::decode(d))?; d.read_struct_field("non_narrow_chars", |d| Decodable::decode(d));
let name_hash: u128 = d.read_struct_field("name_hash", |d| Decodable::decode(d))?; let name_hash: u128 = d.read_struct_field("name_hash", |d| Decodable::decode(d));
let normalized_pos: Vec<NormalizedPos> = let normalized_pos: Vec<NormalizedPos> =
d.read_struct_field("normalized_pos", |d| Decodable::decode(d))?; d.read_struct_field("normalized_pos", |d| Decodable::decode(d));
let cnum: CrateNum = d.read_struct_field("cnum", |d| Decodable::decode(d))?; let cnum: CrateNum = d.read_struct_field("cnum", |d| Decodable::decode(d));
Ok(SourceFile { SourceFile {
name, name,
start_pos, start_pos,
end_pos, end_pos,
@ -1506,7 +1506,7 @@ impl<D: Decoder> Decodable<D> for SourceFile {
normalized_pos, normalized_pos,
name_hash, name_hash,
cnum, cnum,
}) }
}) })
} }
} }
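The `lines` field above is a delta-encoded line table: a count, a diff width, the first absolute position, then `num_lines - 1` diffs of that width. A compact, hedged restatement as a free function over plain `u32`s (illustrative only; the real code accumulates `BytePos` values):

fn decode_line_starts<D: Decoder>(d: &mut D) -> Vec<u32> {
    let num_lines: u32 = Decodable::decode(d);
    let mut lines = Vec::with_capacity(num_lines as usize);
    if num_lines > 0 {
        let bytes_per_diff: u8 = Decodable::decode(d);
        let mut line_start: u32 = Decodable::decode(d);
        lines.push(line_start);
        for _ in 1..num_lines {
            let diff = match bytes_per_diff {
                1 => d.read_u8() as u32,
                2 => d.read_u16() as u32,
                4 => d.read_u32(),
                _ => unreachable!(),
            };
            line_start += diff;
            lines.push(line_start);
        }
    }
    lines
}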
@ -1949,8 +1949,8 @@ impl<S: rustc_serialize::Encoder> Encodable<S> for BytePos {
} }
impl<D: rustc_serialize::Decoder> Decodable<D> for BytePos { impl<D: rustc_serialize::Decoder> Decodable<D> for BytePos {
fn decode(d: &mut D) -> Result<BytePos, D::Error> { fn decode(d: &mut D) -> BytePos {
Ok(BytePos(d.read_u32()?)) BytePos(d.read_u32())
} }
} }

View file

@ -1755,8 +1755,8 @@ impl<S: Encoder> Encodable<S> for Symbol {
impl<D: Decoder> Decodable<D> for Symbol { impl<D: Decoder> Decodable<D> for Symbol {
#[inline] #[inline]
fn decode(d: &mut D) -> Result<Symbol, D::Error> { fn decode(d: &mut D) -> Symbol {
Ok(Symbol::intern(&d.read_str()?)) Symbol::intern(&d.read_str())
} }
} }

View file

@ -292,7 +292,7 @@ crate fn load_call_locations(
for path in with_examples { for path in with_examples {
let bytes = fs::read(&path).map_err(|e| format!("{} (for path {})", e, path))?; let bytes = fs::read(&path).map_err(|e| format!("{} (for path {})", e, path))?;
let mut decoder = Decoder::new(&bytes, 0); let mut decoder = Decoder::new(&bytes, 0);
let calls = AllCallLocations::decode(&mut decoder)?; let calls = AllCallLocations::decode(&mut decoder);
for (function, fn_calls) in calls.into_iter() { for (function, fn_calls) in calls.into_iter() {
all_calls.entry(function).or_default().extend(fn_calls.into_iter()); all_calls.entry(function).or_default().extend(fn_calls.into_iter());

View file

@ -18,6 +18,6 @@ struct A {
fn main() { fn main() {
let obj = A { foo: Box::new([true, false]) }; let obj = A { foo: Box::new([true, false]) };
let s = json::encode(&obj).unwrap(); let s = json::encode(&obj).unwrap();
let obj2: A = json::decode(&s).unwrap(); let obj2: A = json::decode(&s);
assert_eq!(obj.foo, obj2.foo); assert_eq!(obj.foo, obj2.foo);
} }

View file

@ -27,7 +27,7 @@ struct B {
fn main() { fn main() {
let obj = B { foo: Cell::new(true), bar: RefCell::new(A { baz: 2 }) }; let obj = B { foo: Cell::new(true), bar: RefCell::new(A { baz: 2 }) };
let s = json::encode(&obj).unwrap(); let s = json::encode(&obj).unwrap();
let obj2: B = json::decode(&s).unwrap(); let obj2: B = json::decode(&s);
assert_eq!(obj.foo.get(), obj2.foo.get()); assert_eq!(obj.foo.get(), obj2.foo.get());
assert_eq!(obj.bar.borrow().baz, obj2.bar.borrow().baz); assert_eq!(obj.bar.borrow().baz, obj2.bar.borrow().baz);
} }

View file

@ -20,7 +20,7 @@ pub fn main() {
let json_object = json::from_str(&json_str); let json_object = json::from_str(&json_str);
let mut decoder = json::Decoder::new(json_object.unwrap()); let mut decoder = json::Decoder::new(json_object.unwrap());
let mut decoded_obj: UnitLikeStruct = Decodable::decode(&mut decoder).unwrap(); let mut decoded_obj: UnitLikeStruct = Decodable::decode(&mut decoder);
assert_eq!(obj, decoded_obj); assert_eq!(obj, decoded_obj);
} }

View file

@ -12,7 +12,7 @@ trait JD: Decodable<json::Decoder> {}
fn exec<T: JD>() { fn exec<T: JD>() {
let doc = json::from_str("").unwrap(); let doc = json::from_str("").unwrap();
let mut decoder = json::Decoder::new(doc); let mut decoder = json::Decoder::new(doc);
let _v: T = Decodable::decode(&mut decoder).unwrap(); let _v: T = Decodable::decode(&mut decoder);
panic!() panic!()
} }

View file

@ -13,5 +13,5 @@ use rustc_serialize::{json, Decodable};
pub fn main() { pub fn main() {
let json = json::from_str("[1]").unwrap(); let json = json::from_str("[1]").unwrap();
let mut decoder = json::Decoder::new(json); let mut decoder = json::Decoder::new(json);
let _x: Vec<isize> = Decodable::decode(&mut decoder).unwrap(); let _x: Vec<isize> = Decodable::decode(&mut decoder);
} }