rustc_metadata: use the shorthand encoding for predicates also.
parent cc47dc5c6e
commit 6890354f3b
6 changed files with 67 additions and 124 deletions

@@ -72,8 +72,6 @@ pub mod root_tag {
     pub const crate_info: usize = 0x104;
 
     pub const index: usize = 0x110;
-    pub const xref_index: usize = 0x111;
-    pub const xref_data: usize = 0x112;
     pub const crate_deps: usize = 0x102;
     pub const dylib_dependency_formats: usize = 0x106;
     pub const native_libraries: usize = 0x10a;

@@ -202,7 +200,7 @@ pub mod item_tag {
     pub const fn_arg_names: usize = 0x85;
 }
 
-/// The shorthand encoding of `Ty` uses `TypeVariants`' variant `usize`
+/// The shorthand encoding uses an enum's variant index `usize`
 /// and is offset by this value so it never matches a real variant.
 /// This offset is also chosen so that the first byte is never < 0x80.
-pub const TYPE_SHORTHAND_OFFSET: usize = 0x80;
+pub const SHORTHAND_OFFSET: usize = 0x80;

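The offset trick relies on a property of the LEB128-style integer encoding used by the metadata encoder: a usize below 0x80 fits in a single byte with the high bit clear, while a shorthand position, offset by SHORTHAND_OFFSET, is always at least 0x80, so its first encoded byte has the high bit set. That is what lets the decoder peek at one byte to tell an inline encoding from a back-reference. A minimal standalone sketch of that property, assuming the usual unsigned LEB128 scheme rather than the exact rustc encoder:

// Not part of the patch: illustrates why the first byte of a shorthand
// always has the high bit (0x80) set, assuming plain unsigned LEB128.
const SHORTHAND_OFFSET: usize = 0x80;

fn write_leb128(out: &mut Vec<u8>, mut value: usize) {
    loop {
        let mut byte = (value & 0x7f) as u8;
        value >>= 7;
        if value != 0 {
            byte |= 0x80; // continuation bit: more bytes follow
        }
        out.push(byte);
        if value == 0 {
            break;
        }
    }
}

fn main() {
    // An inline encoding starts with the enum variant's discriminant,
    // which is asserted to be < 0x80, so its first byte has the high bit clear.
    let mut inline = Vec::new();
    write_leb128(&mut inline, 5);
    assert_eq!(inline[0] & 0x80, 0);

    // A shorthand is a byte position plus SHORTHAND_OFFSET, so it is always
    // >= 0x80 and its first LEB128 byte has the high bit set.
    let mut shorthand = Vec::new();
    write_leb128(&mut shorthand, 42 + SHORTHAND_OFFSET);
    assert_ne!(shorthand[0] & 0x80, 0);
}
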
@@ -329,7 +329,6 @@ impl<'a> CrateReader<'a> {
             extern_crate: Cell::new(None),
             info: crate_info,
             index: decoder::load_index(metadata.as_slice()),
-            xref_index: decoder::load_xrefs(metadata.as_slice()),
             key_map: decoder::load_key_map(metadata.as_slice()),
             data: metadata,
             cnum_map: RefCell::new(cnum_map),

@@ -80,7 +80,6 @@ pub struct CrateMetadata {
 
     pub info: common::CrateInfo,
     pub index: index::Index,
-    pub xref_index: index::DenseIndex,
 
     /// For each public item in this crate, we encode a key. When the
     /// crate is loaded, we read all the keys and put them in this

@@ -243,10 +243,10 @@ impl<'a, 'tcx> SpecializedDecoder<Ty<'tcx>> for DecodeContext<'a, 'tcx> {
         // Handle shorthands first, if we have an usize > 0x80.
         if self.opaque.data[self.opaque.position()] & 0x80 != 0 {
             let pos = self.read_usize()?;
-            assert!(pos >= TYPE_SHORTHAND_OFFSET);
+            assert!(pos >= SHORTHAND_OFFSET);
             let key = ty::CReaderCacheKey {
                 cnum: self.cdata().cnum,
-                pos: pos - TYPE_SHORTHAND_OFFSET
+                pos: pos - SHORTHAND_OFFSET
             };
             if let Some(ty) = tcx.rcache.borrow().get(&key).cloned() {
                 return Ok(ty);

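For `Ty`, the decoder also memoizes by the shorthand's position: `tcx.rcache` is keyed by crate number and byte position, so a type that many shorthands point at is fully decoded only once. A small self-contained sketch of that caching idea, with a plain `HashMap` standing in for rustc's cache rather than the actual `CReaderCacheKey` machinery:

// Not part of the patch: a toy position-keyed cache illustrating the
// memoization used when decoding `Ty` shorthands.
use std::collections::HashMap;

struct PositionCache<T> {
    by_pos: HashMap<usize, T>,
}

impl<T: Clone> PositionCache<T> {
    fn new() -> Self {
        PositionCache { by_pos: HashMap::new() }
    }

    /// `decode_at` stands in for fully decoding the value found at `pos`.
    fn get_or_decode(&mut self, pos: usize, decode_at: impl FnOnce(usize) -> T) -> T {
        if let Some(v) = self.by_pos.get(&pos) {
            return v.clone();
        }
        let v = decode_at(pos);
        self.by_pos.insert(pos, v.clone());
        v
    }
}

fn main() {
    let mut cache = PositionCache::new();
    let mut decodes = 0;
    for _ in 0..3 {
        // Three shorthands pointing at position 42 trigger only one decode.
        cache.get_or_decode(42, |_| {
            decodes += 1;
            String::from("decoded value")
        });
    }
    assert_eq!(decodes, 1);
}
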
@@ -333,11 +333,6 @@ pub fn crate_rustc_version(data: &[u8]) -> Option<String> {
     })
 }
 
-pub fn load_xrefs(data: &[u8]) -> index::DenseIndex {
-    let index = rbml::Doc::new(data).get(root_tag::xref_index);
-    index::DenseIndex::from_buf(index.data, index.start, index.end)
-}
-
 // Go through each item in the metadata and create a map from that
 // item's def-key to the item's DefIndex.
 pub fn load_key_map(data: &[u8]) -> FnvHashMap<DefKey, DefIndex> {

@@ -1099,20 +1094,28 @@ fn doc_predicates<'a, 'tcx>(base_doc: rbml::Doc,
 {
     let mut dcx = base_doc.get(tag).decoder();
     dcx.cdata = Some(cdata);
     dcx.tcx = Some(tcx);
 
     ty::GenericPredicates {
         parent: dcx.decode(),
-        predicates: dcx.seq().map(|offset| {
-            let predicate_pos = cdata.xref_index.lookup(
-                cdata.data(), offset).unwrap() as usize;
-            let mut dcx = rbml::Doc {
-                data: cdata.data(),
-                start: predicate_pos,
-                end: cdata.data().len(),
-            }.decoder();
-            dcx.tcx = Some(tcx);
-            dcx.cdata = Some(cdata);
-            dcx.decode()
+        predicates: (0..dcx.decode::<usize>()).map(|_| {
+            // Handle shorthands first, if we have an usize > 0x80.
+            if dcx.opaque.data[dcx.opaque.position()] & 0x80 != 0 {
+                let pos = dcx.decode::<usize>();
+                assert!(pos >= SHORTHAND_OFFSET);
+                let pos = pos - SHORTHAND_OFFSET;
+
+                let mut dcx = rbml::Doc {
+                    data: cdata.data(),
+                    start: pos,
+                    end: cdata.data().len(),
+                }.decoder();
+                dcx.tcx = Some(tcx);
+                dcx.cdata = Some(cdata);
+                dcx.decode()
+            } else {
+                dcx.decode()
+            }
         }).collect()
     }
 }

@@ -16,7 +16,7 @@
 use astencode::encode_inlined_item;
 use common::*;
 use cstore;
-use index::{self, IndexData};
+use index::IndexData;
 
 use rustc::middle::cstore::{InlinedItemRef, LinkMeta, LinkagePreference};
 use rustc::hir::def;

@@ -30,11 +30,10 @@ use rustc::session::config::{self, CrateTypeRustcMacro};
 use rustc::util::nodemap::{FnvHashMap, NodeSet};
 
 use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
-use std::cell::RefCell;
+use std::hash::Hash;
 use std::intrinsics;
 use std::io::prelude::*;
 use std::io::Cursor;
-use std::mem;
 use std::ops::{Deref, DerefMut};
 use std::rc::Rc;
 use std::u32;

@@ -58,14 +57,10 @@ pub struct EncodeContext<'a, 'tcx: 'a> {
     reachable: &'a NodeSet,
     mir_map: &'a MirMap<'tcx>,
 
-    type_shorthands: RefCell<FnvHashMap<Ty<'tcx>, usize>>,
-    xrefs: FnvHashMap<XRef<'tcx>, u32>, // sequentially-assigned
+    type_shorthands: FnvHashMap<Ty<'tcx>, usize>,
+    predicate_shorthands: FnvHashMap<ty::Predicate<'tcx>, usize>,
 }
 
-/// "interned" entries referenced by id
-#[derive(PartialEq, Eq, Hash)]
-enum XRef<'tcx> { Predicate(ty::Predicate<'tcx>) }
-
 impl<'a, 'tcx> Deref for EncodeContext<'a, 'tcx> {
     type Target = rbml::writer::Encoder<'a>;
     fn deref(&self) -> &Self::Target {

@@ -117,32 +112,7 @@ impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> {
 
 impl<'a, 'tcx> SpecializedEncoder<Ty<'tcx>> for EncodeContext<'a, 'tcx> {
     fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
-        let existing_shorthand = self.type_shorthands.borrow().get(ty).cloned();
-        if let Some(shorthand) = existing_shorthand {
-            return self.emit_usize(shorthand);
-        }
-
-        let start = self.mark_stable_position();
-        ty.sty.encode(self)?;
-        let len = self.mark_stable_position() - start;
-
-        // The shorthand encoding uses the same usize as the
-        // discriminant, with an offset so they can't conflict.
-        let discriminant = unsafe { intrinsics::discriminant_value(&ty.sty) };
-        assert!(discriminant < TYPE_SHORTHAND_OFFSET as u64);
-        let shorthand = start + TYPE_SHORTHAND_OFFSET;
-
-        // Get the number of bits that leb128 could fit
-        // in the same space as the fully encoded type.
-        let leb128_bits = len * 7;
-
-        // Check that the shorthand is a not longer than the
-        // full encoding itself, i.e. it's an obvious win.
-        if leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits) {
-            self.type_shorthands.borrow_mut().insert(*ty, shorthand);
-        }
-
-        Ok(())
+        self.encode_with_shorthand(ty, &ty.sty, |ecx| &mut ecx.type_shorthands)
     }
 }
 

@@ -163,6 +133,42 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         }).unwrap();
     }
 
+    /// Encode the given value or a previously cached shorthand.
+    fn encode_with_shorthand<T, U, M>(&mut self, value: &T, variant: &U, map: M)
+                                      -> Result<(), <Self as Encoder>::Error>
+        where M: for<'b> Fn(&'b mut Self) -> &'b mut FnvHashMap<T, usize>,
+              T: Clone + Eq + Hash,
+              U: Encodable {
+        let existing_shorthand = map(self).get(value).cloned();
+        if let Some(shorthand) = existing_shorthand {
+            return self.emit_usize(shorthand);
+        }
+
+        let start = self.mark_stable_position();
+        variant.encode(self)?;
+        let len = self.mark_stable_position() - start;
+
+        // The shorthand encoding uses the same usize as the
+        // discriminant, with an offset so they can't conflict.
+        let discriminant = unsafe {
+            intrinsics::discriminant_value(variant)
+        };
+        assert!(discriminant < SHORTHAND_OFFSET as u64);
+        let shorthand = start + SHORTHAND_OFFSET;
+
+        // Get the number of bits that leb128 could fit
+        // in the same space as the fully encoded type.
+        let leb128_bits = len * 7;
+
+        // Check that the shorthand is a not longer than the
+        // full encoding itself, i.e. it's an obvious win.
+        if leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits) {
+            map(self).insert(value.clone(), shorthand);
+        }
+
+        Ok(())
+    }
+
     /// For every DefId that we create a metadata item for, we include a
     /// serialized copy of its DefKey, which allows us to recreate a path.
     fn encode_def_key(&mut self, def_id: DefId) {

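The size check in `encode_with_shorthand` decides whether caching the shorthand can ever pay off: `len` bytes of LEB128 hold at most `len * 7` bits of payload, so the shorthand is only recorded when its own encoding is no longer than the full encoding it would replace. A standalone sketch of that arithmetic; `shorthand_is_win` is a hypothetical helper, not part of the patch:

// Not part of the patch: the "obvious win" test from encode_with_shorthand,
// pulled out into a free function for illustration.
fn shorthand_is_win(full_encoding_len: usize, shorthand: usize) -> bool {
    // `full_encoding_len` bytes of LEB128 carry at most 7 bits each.
    let leb128_bits = full_encoding_len * 7;
    // The shorthand wins if it fits in those bits (or the encoding is huge).
    leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits)
}

fn main() {
    // A 2-byte full encoding covers values below 1 << 14 = 16384, so a
    // shorthand at byte position 300 (offset by 0x80) is an obvious win...
    assert!(shorthand_is_win(2, 300 + 0x80));
    // ...while one deep inside a large metadata blob would not be.
    assert!(!shorthand_is_win(2, 20_000));
}
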
@@ -393,7 +399,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         self.start_tag(tag);
         predicates.parent.encode(self).unwrap();
         self.seq(&predicates.predicates, |ecx, predicate| {
-            ecx.add_xref(XRef::Predicate(predicate.clone()))
+            ecx.encode_with_shorthand(predicate, predicate,
+                                      |ecx| &mut ecx.predicate_shorthands).unwrap()
         });
         self.end_tag();
     }

@@ -575,34 +582,6 @@ fn encode_stability(ecx: &mut EncodeContext, def_id: DefId) {
 }
 
 impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
-    fn add_xref(&mut self, xref: XRef<'tcx>) -> u32 {
-        let old_len = self.xrefs.len() as u32;
-        *self.xrefs.entry(xref).or_insert(old_len)
-    }
-
-    fn encode_xrefs(&mut self) {
-        let xrefs = mem::replace(&mut self.xrefs, Default::default());
-        let mut xref_positions = vec![0; xrefs.len()];
-
-        // Encode XRefs sorted by their ID
-        let mut sorted_xrefs: Vec<_> = xrefs.into_iter().collect();
-        sorted_xrefs.sort_by_key(|&(_, id)| id);
-
-        self.start_tag(root_tag::xref_data);
-        for (xref, id) in sorted_xrefs.into_iter() {
-            xref_positions[id as usize] = self.mark_stable_position() as u32;
-            match xref {
-                XRef::Predicate(p) => p.encode(self).unwrap()
-            }
-        }
-        self.mark_stable_position();
-        self.end_tag();
-
-        self.start_tag(root_tag::xref_index);
-        index::write_dense_index(xref_positions, &mut self.opaque.cursor);
-        self.end_tag();
-    }
-
     fn encode_info_for_item(&mut self,
                             (def_id, item): (DefId, &hir::Item)) {
         let tcx = self.tcx;

@@ -1233,7 +1212,7 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         reachable: reachable,
         mir_map: mir_map,
         type_shorthands: Default::default(),
-        xrefs: Default::default()
+        predicate_shorthands: Default::default()
     });
 
     // RBML compacts the encoded bytes whenever appropriate,

@@ -1345,10 +1324,6 @@ fn encode_metadata_inner(ecx: &mut EncodeContext) {
     encode_item_index(ecx, items);
     let index_bytes = ecx.position() - i;
 
-    i = ecx.position();
-    ecx.encode_xrefs();
-    let xref_bytes = ecx.position() - i;
-
     let total_bytes = ecx.position();
 
     if ecx.tcx.sess.meta_stats() {

@@ -1369,7 +1344,6 @@ fn encode_metadata_inner(ecx: &mut EncodeContext) {
         println!(" reachable bytes: {}", reachable_bytes);
         println!(" item bytes: {}", item_bytes);
         println!(" index bytes: {}", index_bytes);
-        println!(" xref bytes: {}", xref_bytes);
         println!(" zero bytes: {}", zero_bytes);
         println!(" total bytes: {}", total_bytes);
     }

@@ -108,36 +108,6 @@ impl IndexData {
     }
 }
 
-/// A dense index with integer keys. Different API from IndexData (should
-/// these be merged?)
-pub struct DenseIndex {
-    start: usize,
-    end: usize
-}
-
-impl DenseIndex {
-    pub fn lookup(&self, buf: &[u8], ix: u32) -> Option<u32> {
-        let data = bytes_to_words(&buf[self.start..self.end]);
-        data.get(ix as usize).map(|d| u32::from_le(*d))
-    }
-    pub fn from_buf(buf: &[u8], start: usize, end: usize) -> Self {
-        assert!((end-start)%4 == 0 && start <= end && end <= buf.len());
-        DenseIndex {
-            start: start,
-            end: end
-        }
-    }
-}
-
-pub fn write_dense_index(entries: Vec<u32>, buf: &mut Cursor<Vec<u8>>) {
-    let elen = entries.len();
-    assert!(elen < u32::MAX as usize);
-
-    buf.write_all(words_to_bytes(&entries)).unwrap();
-
-    info!("write_dense_index: {} entries", elen);
-}
-
-fn bytes_to_words(b: &[u8]) -> &[u32] {
-    assert!(b.len() % 4 == 0);
-    unsafe { slice::from_raw_parts(b.as_ptr() as *const u32, b.len()/4) }