Make the codegen unit partitioner also emit item declarations.
michaelwoerister committed Apr 28, 2016
1 parent 7f04d35 · commit 0fc9f9a
Showing 15 changed files with 225 additions and 113 deletions.
33 changes: 22 additions & 11 deletions src/librustc_data_structures/bitvec.rs
@@ -52,9 +52,8 @@ impl BitVector {

pub fn grow(&mut self, num_bits: usize) {
let num_words = u64s(num_bits);
let extra_words = self.data.len() - num_words;
if extra_words > 0 {
self.data.extend((0..extra_words).map(|_| 0));
if self.data.len() < num_words {
self.data.resize(num_words, 0)
}
}

@@ -284,15 +283,27 @@ fn union_two_vecs() {
#[test]
fn grow() {
let mut vec1 = BitVector::new(65);
assert!(vec1.insert(3));
assert!(!vec1.insert(3));
assert!(vec1.insert(5));
assert!(vec1.insert(64));
for index in 0 .. 65 {
assert!(vec1.insert(index));
assert!(!vec1.insert(index));
}
vec1.grow(128);
assert!(vec1.contains(3));
assert!(vec1.contains(5));
assert!(vec1.contains(64));
assert!(!vec1.contains(126));

// Check if the bits set before growing are still set
for index in 0 .. 65 {
assert!(vec1.contains(index));
}

// Check if the new bits are all un-set
for index in 65 .. 128 {
assert!(!vec1.contains(index));
}

// Check that we can set all new bits without running out of bounds
for index in 65 .. 128 {
assert!(vec1.insert(index));
assert!(!vec1.insert(index));
}
}

#[test]
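The old `grow` computed `let extra_words = self.data.len() - num_words;`, which only extends the vector when it is already large enough and underflows `usize` (panicking in debug builds) in exactly the case that matters, namely when more words are needed. The fixed version compares first and uses `Vec::resize`, leaving existing words, and therefore previously set bits, untouched. Below is a minimal, self-contained sketch of the fixed behaviour; it uses a simplified stand-in for the compiler's `BitVector`, so `u64s` and the field name mirror the diff but the rest is illustrative only.

```rust
// Simplified stand-in for the compiler's BitVector, just to illustrate the
// fixed `grow` semantics; not the real rustc_data_structures type.
fn u64s(num_bits: usize) -> usize {
    (num_bits + 63) / 64 // number of 64-bit words needed to hold `num_bits`
}

struct BitVector {
    data: Vec<u64>,
}

impl BitVector {
    fn new(num_bits: usize) -> BitVector {
        BitVector { data: vec![0; u64s(num_bits)] }
    }

    // Fixed version: only ever extends, and never touches existing words, so
    // bits set before the call stay set afterwards.
    fn grow(&mut self, num_bits: usize) {
        let num_words = u64s(num_bits);
        if self.data.len() < num_words {
            self.data.resize(num_words, 0);
        }
    }

    // Returns true if the bit was not set before.
    fn insert(&mut self, bit: usize) -> bool {
        let word = &mut self.data[bit / 64];
        let mask = 1u64 << (bit % 64);
        let was_unset = *word & mask == 0;
        *word |= mask;
        was_unset
    }

    fn contains(&self, bit: usize) -> bool {
        self.data[bit / 64] & (1u64 << (bit % 64)) != 0
    }
}

fn main() {
    let mut v = BitVector::new(64);   // one 64-bit word
    assert!(v.insert(63));
    v.grow(128);                      // now needs two words
    assert!(v.contains(63));          // pre-existing bit survives the grow
    assert!(!v.contains(127));        // new bits start out unset
    assert!(v.insert(127));           // and can be set without indexing out of bounds
}
```

With the old formulation, the `grow(128)` call above would compute `1 - 2` in `usize` arithmetic and abort before any bit could be checked.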
2 changes: 1 addition & 1 deletion src/librustc_llvm/lib.rs
@@ -97,7 +97,7 @@ pub enum Visibility {
// DLLExportLinkage, GhostLinkage and LinkOnceODRAutoHideLinkage.
// LinkerPrivateLinkage and LinkerPrivateWeakLinkage are not included either;
// they've been removed in upstream LLVM commit r203866.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Linkage {
ExternalLinkage = 0,
AvailableExternallyLinkage = 1,
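The only change here is adding `Hash` to the derive list. A plausible motivation (an assumption, not stated in the diff): code that keys hash-based collections on a linkage, or derives `Hash` on a type that embeds one, needs `Linkage` itself to implement `Hash`. A small illustrative sketch with an abbreviated stand-in enum:

```rust
use std::collections::HashMap;

// Abbreviated stand-in for the real enum in librustc_llvm; only the derive
// mechanics are the point here.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
enum Linkage {
    ExternalLinkage,
    InternalLinkage,
}

fn main() {
    // With `Hash` derived, a linkage value can participate in hashed keys,
    // e.g. counting how many items ended up with each linkage kind
    // (a hypothetical use, just to exercise the bound).
    let assigned = [
        Linkage::ExternalLinkage,
        Linkage::InternalLinkage,
        Linkage::InternalLinkage,
    ];
    let mut per_linkage: HashMap<Linkage, usize> = HashMap::new();
    for linkage in &assigned {
        *per_linkage.entry(*linkage).or_insert(0) += 1;
    }
    assert_eq!(per_linkage[&Linkage::InternalLinkage], 2);
}
```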
29 changes: 15 additions & 14 deletions src/librustc_trans/base.rs
@@ -58,7 +58,6 @@ use attributes;
use build::*;
use builder::{Builder, noname};
use callee::{Callee, CallArgs, ArgExprs, ArgVals};
use partitioning::{self, PartitioningStrategy};
use cleanup::{self, CleanupMethods, DropHint};
use closure;
use common::{Block, C_bool, C_bytes_in_context, C_i32, C_int, C_uint, C_integral};
@@ -83,6 +82,7 @@ use machine::{llalign_of_min, llsize_of, llsize_of_real};
use meth;
use mir;
use monomorphize::{self, Instance};
use partitioning::{self, PartitioningStrategy, InstantiationMode};
use symbol_names_test;
use tvec;
use type_::Type;
@@ -2934,7 +2934,7 @@ fn collect_translation_items<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>) {
None => TransItemCollectionMode::Lazy
};

let (items, inlining_map) = time(time_passes, "translation item collection", || {
let (items, reference_map) = time(time_passes, "translation item collection", || {
collector::collect_crate_translation_items(&ccx, collection_mode)
});

@@ -2948,7 +2948,7 @@ fn collect_translation_items<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>) {
partitioning::partition(ccx.tcx(),
items.iter().cloned(),
strategy,
&inlining_map)
&reference_map)
});

if ccx.sess().opts.debugging_opts.print_trans_items.is_some() {
@@ -2976,17 +2976,18 @@ fn collect_translation_items<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>) {
output.push_str(&cgu_name[..]);

let linkage_abbrev = match linkage {
llvm::ExternalLinkage => "External",
llvm::AvailableExternallyLinkage => "Available",
llvm::LinkOnceAnyLinkage => "OnceAny",
llvm::LinkOnceODRLinkage => "OnceODR",
llvm::WeakAnyLinkage => "WeakAny",
llvm::WeakODRLinkage => "WeakODR",
llvm::AppendingLinkage => "Appending",
llvm::InternalLinkage => "Internal",
llvm::PrivateLinkage => "Private",
llvm::ExternalWeakLinkage => "ExternalWeak",
llvm::CommonLinkage => "Common",
InstantiationMode::Def(llvm::ExternalLinkage) => "External",
InstantiationMode::Def(llvm::AvailableExternallyLinkage) => "Available",
InstantiationMode::Def(llvm::LinkOnceAnyLinkage) => "OnceAny",
InstantiationMode::Def(llvm::LinkOnceODRLinkage) => "OnceODR",
InstantiationMode::Def(llvm::WeakAnyLinkage) => "WeakAny",
InstantiationMode::Def(llvm::WeakODRLinkage) => "WeakODR",
InstantiationMode::Def(llvm::AppendingLinkage) => "Appending",
InstantiationMode::Def(llvm::InternalLinkage) => "Internal",
InstantiationMode::Def(llvm::PrivateLinkage) => "Private",
InstantiationMode::Def(llvm::ExternalWeakLinkage) => "ExternalWeak",
InstantiationMode::Def(llvm::CommonLinkage) => "Common",
InstantiationMode::Decl => "Declaration",
};

output.push_str("[");
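With the partitioner now emitting declarations as well as definitions, the `-Z print-trans-items` debug output matches on an `InstantiationMode` instead of a bare `llvm::Linkage` and gains a `Declaration` abbreviation. The sketch below reproduces that mapping with local stand-in enums (abbreviated variant lists, hypothetical names) so the new `Decl` arm can be seen in isolation:

```rust
// Local stand-ins with abbreviated variant lists; the real code matches on
// llvm::Linkage wrapped in partitioning::InstantiationMode, as in the diff above.
#[derive(Copy, Clone)]
enum Linkage {
    External,
    Internal,
    WeakODR,
}

#[derive(Copy, Clone)]
enum InstantiationMode {
    Def(Linkage), // a full definition, emitted with the given linkage
    Decl,         // only a forward declaration in this codegen unit
}

fn linkage_abbrev(mode: InstantiationMode) -> &'static str {
    match mode {
        InstantiationMode::Def(Linkage::External) => "External",
        InstantiationMode::Def(Linkage::Internal) => "Internal",
        InstantiationMode::Def(Linkage::WeakODR) => "WeakODR",
        InstantiationMode::Decl => "Declaration",
    }
}

fn main() {
    // Mirrors only the visible lines of the printer: the codegen unit name
    // followed by the abbreviation in square brackets.
    let cgu_name = "example_cgu";
    let mut output = String::new();
    output.push_str(cgu_name);
    output.push_str("[");
    output.push_str(linkage_abbrev(InstantiationMode::Decl));
    output.push_str("]");
    assert_eq!(output, "example_cgu[Declaration]");
}
```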
104 changes: 86 additions & 18 deletions src/librustc_trans/collector.rs
@@ -188,6 +188,8 @@
//! this is not implemented however: a translation item will be produced
//! regardless of whether it is actually needed or not.

use rustc_data_structures::bitvec::BitVector;

use rustc::hir;
use rustc::hir::intravisit as hir_visit;

@@ -252,12 +254,76 @@ impl<'tcx> Hash for TransItem<'tcx> {
}
}

pub type InliningMap<'tcx> = FnvHashMap<TransItem<'tcx>, FnvHashSet<TransItem<'tcx>>>;
/// Maps every translation item to all translation items it references in its
/// body.
pub struct ReferenceMap<'tcx> {
// Maps a source translation item to a range of target translation items.
// The two numbers in the tuple are the start (inclusive) and
// end index (exclusive) within the `targets` and the `inlined` vecs.
index: FnvHashMap<TransItem<'tcx>, (usize, usize)>,
targets: Vec<TransItem<'tcx>>,
inlined: BitVector
}

impl<'tcx> ReferenceMap<'tcx> {

fn new() -> ReferenceMap<'tcx> {
ReferenceMap {
index: FnvHashMap(),
targets: Vec::new(),
inlined: BitVector::new(64 * 256),
}
}

fn record_references<I>(&mut self, source: TransItem<'tcx>, targets: I)
where I: Iterator<Item=(TransItem<'tcx>, bool)>
{
assert!(!self.index.contains_key(&source));

let start_index = self.targets.len();

for (target, inlined) in targets {
let index = self.targets.len();
self.targets.push(target);
self.inlined.grow(index + 1);

if inlined {
self.inlined.insert(index);
}
}

let end_index = self.targets.len();
self.index.insert(source, (start_index, end_index));
}

// Internally iterate over all items referenced by `source` which will be
// made available for inlining.
pub fn with_inlining_candidates<F>(&self, source: TransItem<'tcx>, mut f: F)
where F: FnMut(TransItem<'tcx>) {
if let Some(&(start_index, end_index)) = self.index.get(&source)
{
for index in start_index .. end_index {
if self.inlined.contains(index) {
f(self.targets[index])
}
}
}
}

pub fn get_direct_references_from(&self, source: TransItem<'tcx>) -> &[TransItem<'tcx>]
{
if let Some(&(start_index, end_index)) = self.index.get(&source) {
&self.targets[start_index .. end_index]
} else {
&self.targets[0 .. 0]
}
}
}

pub fn collect_crate_translation_items<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
mode: TransItemCollectionMode)
-> (FnvHashSet<TransItem<'tcx>>,
InliningMap<'tcx>) {
ReferenceMap<'tcx>) {
// We are not tracking dependencies of this pass as it has to be re-executed
// every time no matter what.
ccx.tcx().dep_graph.with_ignore(|| {
@@ -266,17 +332,17 @@ pub fn collect_crate_translation_items<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
debug!("Building translation item graph, beginning at roots");
let mut visited = FnvHashSet();
let mut recursion_depths = DefIdMap();
let mut inlining_map = FnvHashMap();
let mut reference_map = ReferenceMap::new();

for root in roots {
collect_items_rec(ccx,
root,
&mut visited,
&mut recursion_depths,
&mut inlining_map);
&mut reference_map);
}

(visited, inlining_map)
(visited, reference_map)
})
}

@@ -307,7 +373,7 @@ fn collect_items_rec<'a, 'tcx: 'a>(ccx: &CrateContext<'a, 'tcx>,
starting_point: TransItem<'tcx>,
visited: &mut FnvHashSet<TransItem<'tcx>>,
recursion_depths: &mut DefIdMap<usize>,
inlining_map: &mut InliningMap<'tcx>) {
reference_map: &mut ReferenceMap<'tcx>) {
if !visited.insert(starting_point.clone()) {
// We've been here already, no need to search again.
return;
@@ -351,9 +417,10 @@ fn collect_items_rec<'a, 'tcx: 'a>(ccx: &CrateContext<'a, 'tcx>,
}
}

record_references(ccx, starting_point, &neighbors[..], reference_map);

for neighbour in neighbors {
record_inlined_use(ccx, starting_point, neighbour, inlining_map);
collect_items_rec(ccx, neighbour, visited, recursion_depths, inlining_map);
collect_items_rec(ccx, neighbour, visited, recursion_depths, reference_map);
}

if let Some((def_id, depth)) = recursion_depth_reset {
@@ -363,16 +430,17 @@ fn collect_items_rec<'a, 'tcx: 'a>(ccx: &CrateContext<'a, 'tcx>,
debug!("END collect_items_rec({})", starting_point.to_string(ccx));
}

fn record_inlined_use<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
caller: TransItem<'tcx>,
callee: TransItem<'tcx>,
inlining_map: &mut InliningMap<'tcx>) {
if callee.is_from_extern_crate() ||
callee.requests_inline(ccx.tcx()) {
inlining_map.entry(caller)
.or_insert_with(|| FnvHashSet())
.insert(callee);
}
fn record_references<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
caller: TransItem<'tcx>,
callees: &[TransItem<'tcx>],
reference_map: &mut ReferenceMap<'tcx>) {
let iter = callees.into_iter()
.map(|callee| {
let is_inlining_candidate = callee.is_from_extern_crate() ||
callee.requests_inline(ccx.tcx());
(*callee, is_inlining_candidate)
});
reference_map.record_references(caller, iter);
}

fn check_recursion_limit<'a, 'tcx: 'a>(ccx: &CrateContext<'a, 'tcx>,
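The `ReferenceMap` introduced above replaces the old `InliningMap` (a `FnvHashMap<TransItem, FnvHashSet<TransItem>>`) with a flatter encoding: each source item maps to a half-open index range into one shared `targets` vector, and a parallel bit vector marks which of those targets are inlining candidates. The sketch below reimplements that encoding with plain `String` items and std collections (the real code uses `FnvHashMap`, `TransItem`, and the `BitVector` grown above), so the record/query flow can be followed end to end:

```rust
use std::collections::HashMap;

// Simplified stand-in: `String` instead of TransItem, `Vec<bool>` instead of
// the compiler's BitVector, std HashMap instead of FnvHashMap.
struct ReferenceMap {
    // source item -> (start, end) range (end exclusive) into `targets`/`inlined`
    index: HashMap<String, (usize, usize)>,
    targets: Vec<String>,
    inlined: Vec<bool>,
}

impl ReferenceMap {
    fn new() -> ReferenceMap {
        ReferenceMap {
            index: HashMap::new(),
            targets: Vec::new(),
            inlined: Vec::new(),
        }
    }

    // Record all items referenced by `source` in one go; each target carries a
    // flag saying whether it should be made available for inlining.
    fn record_references<I>(&mut self, source: &str, targets: I)
        where I: IntoIterator<Item = (String, bool)>
    {
        assert!(!self.index.contains_key(source));
        let start = self.targets.len();
        for (target, inline) in targets {
            self.targets.push(target);
            self.inlined.push(inline);
        }
        let end = self.targets.len();
        self.index.insert(source.to_string(), (start, end));
    }

    // Internal iteration over the inlining candidates referenced by `source`.
    fn with_inlining_candidates<F: FnMut(&str)>(&self, source: &str, mut f: F) {
        if let Some(&(start, end)) = self.index.get(source) {
            for i in start..end {
                if self.inlined[i] {
                    f(&self.targets[i]);
                }
            }
        }
    }

    fn get_direct_references_from(&self, source: &str) -> &[String] {
        match self.index.get(source) {
            Some(&(start, end)) => &self.targets[start..end],
            None => &self.targets[0..0],
        }
    }
}

fn main() {
    let mut map = ReferenceMap::new();
    // `main` references `helper` (an inlining candidate) and `big_fn` (not one).
    map.record_references("main", vec![
        ("helper".to_string(), true),
        ("big_fn".to_string(), false),
    ]);

    assert_eq!(map.get_direct_references_from("main").len(), 2);

    let mut candidates = Vec::new();
    map.with_inlining_candidates("main", |item| candidates.push(item.to_string()));
    assert_eq!(candidates, vec!["helper".to_string()]);
}
```

Compared with the hash-set-of-sets approach, this keeps each source's targets contiguous and records them exactly once, which is what the `assert!(!self.index.contains_key(&source))` in the real `record_references` enforces.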
