Fixed coverage map issues; better aligned with LLVM APIs

Found some problems with the coverage map encoding when testing with
more than one counter per function.

While debugging, I realized some better ways to structure the Rust
implementation of the coverage mapping generator. I refactored somewhat,
resulting in less code overall, expanded coverage of LLVM Coverage Map
capabilities, and much closer alignment with LLVM data structures, APIs,
and naming.

This should be easier to follow and easier to maintain.
This commit is contained in:
Rich Kadel 2020-07-24 21:14:28 -07:00
parent c4e173472b
commit 12ddd6073a
13 changed files with 612 additions and 600 deletions

View File

@ -1,23 +1,14 @@
use crate::llvm;
use crate::common::CodegenCx;
use crate::coverageinfo;
use crate::llvm;
use log::debug;
use rustc_codegen_ssa::coverageinfo::map::*;
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods, MiscMethods};
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods};
use rustc_data_structures::fx::FxHashMap;
use rustc_llvm::RustString;
use rustc_middle::ty::Instance;
use rustc_middle::{bug, mir};
use std::collections::BTreeMap;
use std::ffi::CString;
use std::path::PathBuf;
// FIXME(richkadel): Complete all variations of generating and exporting the coverage map to LLVM.
// The current implementation is an initial foundation with basic capabilities (Counters, but not
// CounterExpressions, etc.).
/// Generates and exports the Coverage Map.
///
@ -32,174 +23,123 @@ use std::path::PathBuf;
/// undocumented details in Clang's implementation (that may or may not be important) were also
/// replicated for Rust's Coverage Map.
pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
let mut coverage_writer = CoverageMappingWriter::new(cx);
let function_coverage_map = cx.coverage_context().take_function_coverage_map();
if function_coverage_map.len() == 0 {
// This module has no functions with coverage instrumentation
return;
}
let mut mapgen = CoverageMapGenerator::new();
// Encode coverage mappings and generate function records
let mut function_records = Vec::<&'ll llvm::Value>::new();
let coverage_mappings_buffer = llvm::build_byte_buffer(|coverage_mappings_buffer| {
for (instance, function_coverage) in function_coverage_map.into_iter() {
if let Some(function_record) = coverage_writer.write_function_mappings_and_record(
instance,
function_coverage,
coverage_mappings_buffer,
) {
function_records.push(function_record);
}
debug!("Generate coverage map for: {:?}", instance);
let mangled_function_name = cx.tcx.symbol_name(instance).to_string();
let function_source_hash = function_coverage.source_hash();
let (expressions, counter_regions) =
function_coverage.get_expressions_and_counter_regions();
let old_len = coverage_mappings_buffer.len();
mapgen.write_coverage_mappings(expressions, counter_regions, coverage_mappings_buffer);
let mapping_data_size = coverage_mappings_buffer.len() - old_len;
debug_assert!(
mapping_data_size > 0,
"Every `FunctionCoverage` should have at least one counter"
);
let function_record = mapgen.make_function_record(
cx,
mangled_function_name,
function_source_hash,
mapping_data_size,
);
function_records.push(function_record);
}
});
// Encode all filenames covered in this module, ordered by `file_id`
// Encode all filenames referenced by counters/expressions in this module
let filenames_buffer = llvm::build_byte_buffer(|filenames_buffer| {
coverageinfo::write_filenames_section_to_buffer(
&coverage_writer.filenames,
filenames_buffer,
);
coverageinfo::write_filenames_section_to_buffer(&mapgen.filenames, filenames_buffer);
});
if coverage_mappings_buffer.len() > 0 {
// Generate the LLVM IR representation of the coverage map and store it in a well-known
// global constant.
coverage_writer.write_coverage_map(
function_records,
filenames_buffer,
coverage_mappings_buffer,
);
}
// Generate the LLVM IR representation of the coverage map and store it in a well-known global
mapgen.save_generated_coverage_map(
cx,
function_records,
filenames_buffer,
coverage_mappings_buffer,
);
}
struct CoverageMappingWriter<'a, 'll, 'tcx> {
cx: &'a CodegenCx<'ll, 'tcx>,
struct CoverageMapGenerator {
filenames: Vec<CString>,
filename_to_index: FxHashMap<CString, u32>,
}
impl<'a, 'll, 'tcx> CoverageMappingWriter<'a, 'll, 'tcx> {
fn new(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
Self { cx, filenames: Vec::new(), filename_to_index: FxHashMap::<CString, u32>::default() }
impl CoverageMapGenerator {
/// Creates an empty generator with no filenames interned yet.
fn new() -> Self {
    // The value type is fixed by the struct field, so `FxHashMap::default()`
    // infers `FxHashMap<CString, u32>` without an explicit turbofish.
    Self { filenames: Vec::new(), filename_to_index: FxHashMap::default() }
}
/// For the given function, get the coverage region data, stream it to the given buffer, and
/// then generate and return a new function record.
fn write_function_mappings_and_record(
/// Using the `expressions` and `counter_regions` collected for the current function, generate
/// the `mapping_regions` and `virtual_file_mapping`, and capture any new filenames. Then use
/// LLVM APIs to encode the `virtual_file_mapping`, `expressions`, and `mapping_regions` into
/// the given `coverage_mappings` byte buffer, compliant with the LLVM Coverage Mapping format.
fn write_coverage_mappings(
&mut self,
instance: Instance<'tcx>,
mut function_coverage: FunctionCoverage,
coverage_mappings_buffer: &RustString,
) -> Option<&'ll llvm::Value> {
let cx = self.cx;
let coverageinfo: &mir::CoverageInfo = cx.tcx.coverageinfo(instance.def_id());
debug!(
"Generate coverage map for: {:?}, num_counters: {}, num_expressions: {}",
instance, coverageinfo.num_counters, coverageinfo.num_expressions
);
debug_assert!(coverageinfo.num_counters > 0);
let regions_in_file_order = function_coverage.regions_in_file_order(cx.sess().source_map());
if regions_in_file_order.len() == 0 {
return None;
}
// Stream the coverage mapping regions for the function (`instance`) to the buffer, and
// compute the data byte size used.
let old_len = coverage_mappings_buffer.len();
self.regions_to_mappings(regions_in_file_order, coverage_mappings_buffer);
let mapping_data_size = coverage_mappings_buffer.len() - old_len;
debug_assert!(mapping_data_size > 0);
let mangled_function_name = cx.tcx.symbol_name(instance).to_string();
let name_ref = coverageinfo::compute_hash(&mangled_function_name);
let function_source_hash = function_coverage.source_hash();
// Generate and return the function record
let name_ref_val = cx.const_u64(name_ref);
let mapping_data_size_val = cx.const_u32(mapping_data_size as u32);
let func_hash_val = cx.const_u64(function_source_hash);
Some(cx.const_struct(
&[name_ref_val, mapping_data_size_val, func_hash_val],
/*packed=*/ true,
))
}
/// For each coverage region, extract its coverage data from the earlier coverage analysis.
/// Use LLVM APIs to convert the data into buffered bytes compliant with the LLVM Coverage
/// Mapping format.
fn regions_to_mappings(
&mut self,
regions_in_file_order: BTreeMap<PathBuf, BTreeMap<CoverageLoc, (usize, CoverageKind)>>,
expressions: Vec<CounterExpression>,
counter_regions: impl Iterator<Item = (Counter, &'a Region)>,
coverage_mappings_buffer: &RustString,
) {
let mut counter_regions = counter_regions.collect::<Vec<_>>();
if counter_regions.len() == 0 {
return;
}
let mut virtual_file_mapping = Vec::new();
let mut mapping_regions = coverageinfo::SmallVectorCounterMappingRegion::new();
let mut expressions = coverageinfo::SmallVectorCounterExpression::new();
let mut mapping_regions = Vec::new();
let mut current_file_path = None;
let mut current_file_id = 0;
for (file_id, (file_path, file_coverage_regions)) in
regions_in_file_order.into_iter().enumerate()
{
let file_id = file_id as u32;
let filename = CString::new(file_path.to_string_lossy().to_string())
.expect("null error converting filename to C string");
debug!(" file_id: {} = '{:?}'", file_id, filename);
let filenames_index = match self.filename_to_index.get(&filename) {
Some(index) => *index,
None => {
let index = self.filenames.len() as u32;
self.filenames.push(filename.clone());
self.filename_to_index.insert(filename, index);
index
}
};
virtual_file_mapping.push(filenames_index);
let mut mapping_indexes = vec![0 as u32; file_coverage_regions.len()];
for (mapping_index, (region_id, _)) in file_coverage_regions.values().enumerate() {
mapping_indexes[*region_id] = mapping_index as u32;
}
for (region_loc, (region_id, region_kind)) in file_coverage_regions.into_iter() {
let mapping_index = mapping_indexes[region_id];
match region_kind {
CoverageKind::Counter => {
debug!(
" Counter {}, file_id: {}, region_loc: {}",
mapping_index, file_id, region_loc
);
mapping_regions.push_from(
mapping_index,
file_id,
region_loc.start_line,
region_loc.start_col,
region_loc.end_line,
region_loc.end_col,
);
}
CoverageKind::CounterExpression(lhs, op, rhs) => {
debug!(
" CounterExpression {} = {} {:?} {}, file_id: {}, region_loc: {:?}",
mapping_index, lhs, op, rhs, file_id, region_loc,
);
mapping_regions.push_from(
mapping_index,
file_id,
region_loc.start_line,
region_loc.start_col,
region_loc.end_line,
region_loc.end_col,
);
expressions.push_from(op, lhs, rhs);
}
CoverageKind::Unreachable => {
debug!(
" Unreachable region, file_id: {}, region_loc: {:?}",
file_id, region_loc,
);
bug!("Unreachable region not expected and not yet handled!")
// FIXME(richkadel): implement and call
// mapping_regions.push_from(...) for unreachable regions
}
// Convert the list of (Counter, Region) pairs to an array of `CounterMappingRegion`, sorted
// by filename and position. Capture any new files to compute the `CounterMappingRegion`s
// `file_id` (indexing files referenced by the current function), and construct the
// function-specific `virtual_file_mapping` from `file_id` to its index in the module's
// `filenames` array.
counter_regions.sort_by_key(|(_counter, region)| *region);
for (counter, region) in counter_regions {
let (file_path, start_line, start_col, end_line, end_col) = region.file_start_and_end();
let same_file = current_file_path.as_ref().map_or(false, |p| p == file_path);
if !same_file {
if current_file_path.is_some() {
current_file_id += 1;
}
current_file_path = Some(file_path.clone());
let filename = CString::new(file_path.to_string_lossy().to_string())
.expect("null error converting filename to C string");
debug!(" file_id: {} = '{:?}'", current_file_id, filename);
let filenames_index = match self.filename_to_index.get(&filename) {
Some(index) => *index,
None => {
let index = self.filenames.len() as u32;
self.filenames.push(filename.clone());
self.filename_to_index.insert(filename.clone(), index);
index
}
};
virtual_file_mapping.push(filenames_index);
}
mapping_regions.push(coverageinfo::CounterMappingRegion::code_region(
counter,
current_file_id,
start_line,
start_col,
end_line,
end_col,
));
}
// Encode and append the current function's coverage mapping data
@ -211,14 +151,35 @@ impl<'a, 'll, 'tcx> CoverageMappingWriter<'a, 'll, 'tcx> {
);
}
fn write_coverage_map(
/// Generates and returns the function record `Value` for one instrumented
/// function.
///
/// The record is a packed struct of three fields: a hash of the mangled
/// function name (via `coverageinfo::compute_hash`) serving as the indirect
/// "name ref", the byte size of this function's encoded coverage mapping data,
/// and the function's source hash.
fn make_function_record(
    &mut self,
    cx: &CodegenCx<'ll, 'tcx>,
    mangled_function_name: String,
    function_source_hash: u64,
    mapping_data_size: usize,
) -> &'ll llvm::Value {
    // Hash the mangled name to produce the name reference field.
    let name_ref = coverageinfo::compute_hash(&mangled_function_name);
    let fields = [
        cx.const_u64(name_ref),
        cx.const_u32(mapping_data_size as u32),
        cx.const_u64(function_source_hash),
    ];
    cx.const_struct(&fields, /*packed=*/ true)
}
/// Combine the filenames and coverage mappings buffers, construct coverage map header and the
/// array of function records, and combine everything into the complete coverage map. Save the
/// coverage map data into the LLVM IR as a static global using a specific, well-known section
/// and name.
fn save_generated_coverage_map(
self,
cx: &CodegenCx<'ll, 'tcx>,
function_records: Vec<&'ll llvm::Value>,
filenames_buffer: Vec<u8>,
mut coverage_mappings_buffer: Vec<u8>,
) {
let cx = self.cx;
// Concatenate the encoded filenames and encoded coverage mappings, and add additional zero
// bytes as-needed to ensure 8-byte alignment.
let mut coverage_size = coverage_mappings_buffer.len();

View File

@ -23,7 +23,7 @@ const COVMAP_VAR_ALIGN_BYTES: usize = 8;
/// A context object for maintaining all state needed by the coverageinfo module.
pub struct CrateCoverageContext<'tcx> {
// Coverage region data for each instrumented function identified by DefId.
pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage>>,
pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>>>,
}
impl<'tcx> CrateCoverageContext<'tcx> {
@ -31,7 +31,7 @@ impl<'tcx> CrateCoverageContext<'tcx> {
Self { function_coverage_map: Default::default() }
}
pub fn take_function_coverage_map(&self) -> FxHashMap<Instance<'tcx>, FunctionCoverage> {
pub fn take_function_coverage_map(&self) -> FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>> {
self.function_coverage_map.replace(FxHashMap::default())
}
}
@ -47,44 +47,49 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
&mut self,
instance: Instance<'tcx>,
function_source_hash: u64,
index: u32,
id: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
debug!(
"adding counter to coverage_regions: instance={:?}, function_source_hash={}, index={}, byte range {}..{}",
instance, function_source_hash, index, start_byte_pos, end_byte_pos,
"adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={}, \
byte range {}..{}",
instance, function_source_hash, id, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| {
FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
})
.add_counter(function_source_hash, index, start_byte_pos, end_byte_pos);
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter(function_source_hash, id, start_byte_pos, end_byte_pos);
}
fn add_counter_expression_region(
&mut self,
instance: Instance<'tcx>,
index: u32,
id_descending_from_max: u32,
lhs: u32,
op: CounterOp,
op: ExprKind,
rhs: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
debug!(
"adding counter expression to coverage_regions: instance={:?}, index={}, {} {:?} {}, byte range {}..{}",
instance, index, lhs, op, rhs, start_byte_pos, end_byte_pos,
"adding counter expression to coverage_regions: instance={:?}, id={}, {} {:?} {}, \
byte range {}..{}",
instance, id_descending_from_max, lhs, op, rhs, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| {
FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
})
.add_counter_expression(index, lhs, op, rhs, start_byte_pos, end_byte_pos);
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter_expression(
id_descending_from_max,
lhs,
op,
rhs,
start_byte_pos,
end_byte_pos,
);
}
fn add_unreachable_region(
@ -100,107 +105,150 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| {
FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
})
.add_unreachable(start_byte_pos, end_byte_pos);
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_unreachable_region(start_byte_pos, end_byte_pos);
}
}
/// This struct wraps an opaque reference to the C++ template instantiation of
/// `llvm::SmallVector<coverage::CounterExpression>`. Each `coverage::CounterExpression` object is
/// constructed from primitive-typed arguments, and pushed to the `SmallVector`, in the C++
/// implementation of `LLVMRustCoverageSmallVectorCounterExpressionAdd()` (see
/// `src/rustllvm/CoverageMappingWrapper.cpp`).
pub struct SmallVectorCounterExpression<'a> {
pub raw: &'a mut llvm::coverageinfo::SmallVectorCounterExpression<'a>,
/// Aligns to C++ enum llvm::coverage::CounterMappingRegion::RegionKind (the
/// `kind` field of `CounterMappingRegion`, below).
/// With `#[repr(C)]`, the order of discriminators is important: it must match
/// the order of the C++ declarations.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
enum RegionKind {
/// A CodeRegion associates some code with a counter.
CodeRegion,
/// An ExpansionRegion represents a file expansion region that associates
/// a source range with the expansion of a virtual source file, such as
/// for a macro instantiation or `#include` file.
ExpansionRegion,
/// A SkippedRegion represents a source range with code that was skipped
/// by a preprocessor or similar means.
SkippedRegion,
/// A GapRegion is like a CodeRegion, but its count is only set as the
/// line execution count when it's the only region in the line.
GapRegion,
}
impl SmallVectorCounterExpression<'a> {
pub fn new() -> Self {
SmallVectorCounterExpression {
raw: unsafe { llvm::LLVMRustCoverageSmallVectorCounterExpressionCreate() },
}
}
/// This struct provides LLVM's representation of a "CoverageMappingRegion", encoded into the
/// coverage map in accordance with LLVM's "Coverage Mapping Format". The struct composes fields
/// representing the `Counter` type and value(s) (injected counter ID, or expression type and
/// operands), the source file (an indirect index into a "filenames array", encoded separately),
/// and source location (start and end positions of the represented code region).
///
/// Aligns to C++ struct llvm::coverage::CounterMappingRegion.
/// The order of fields is important.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct CounterMappingRegion {
/// The counter type and type-dependent counter data, if any.
counter: Counter,
pub fn as_ptr(&self) -> *const llvm::coverageinfo::SmallVectorCounterExpression<'a> {
self.raw
}
/// An indirect reference to the source filename. In the LLVM Coverage Mapping Format, the
/// file_id is an index into a function-specific `virtual_file_mapping` array of indexes that,
/// in turn, are used to look up the filename for this region.
file_id: u32,
pub fn push_from(
&mut self,
kind: rustc_codegen_ssa::coverageinfo::CounterOp,
left_index: u32,
right_index: u32,
) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterExpressionAdd(
&mut *(self.raw as *mut _),
kind,
left_index,
right_index,
)
}
}
/// If the `RegionKind` is an `ExpansionRegion`, the `expanded_file_id` can be used to find the
/// mapping regions created as a result of macro expansion, by checking if their file id matches
/// the expanded file id.
expanded_file_id: u32,
/// 1-based starting line of the mapping region.
start_line: u32,
/// 1-based starting column of the mapping region.
start_col: u32,
/// 1-based ending line of the mapping region.
end_line: u32,
/// 1-based ending column of the mapping region. If the high bit is set, the current mapping
/// region is a gap area.
end_col: u32,
kind: RegionKind,
}
impl Drop for SmallVectorCounterExpression<'a> {
fn drop(&mut self) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterExpressionDispose(&mut *(self.raw as *mut _));
}
}
}
/// This struct wraps an opaque reference to the C++ template instantiation of
/// `llvm::SmallVector<coverage::CounterMappingRegion>`. Each `coverage::CounterMappingRegion`
/// object is constructed from primitive-typed arguments, and pushed to the `SmallVector`, in the
/// C++ implementation of `LLVMRustCoverageSmallVectorCounterMappingRegionAdd()` (see
/// `src/rustllvm/CoverageMappingWrapper.cpp`).
pub struct SmallVectorCounterMappingRegion<'a> {
pub raw: &'a mut llvm::coverageinfo::SmallVectorCounterMappingRegion<'a>,
}
impl SmallVectorCounterMappingRegion<'a> {
pub fn new() -> Self {
SmallVectorCounterMappingRegion {
raw: unsafe { llvm::LLVMRustCoverageSmallVectorCounterMappingRegionCreate() },
}
}
pub fn as_ptr(&self) -> *const llvm::coverageinfo::SmallVectorCounterMappingRegion<'a> {
self.raw
}
pub fn push_from(
&mut self,
index: u32,
impl CounterMappingRegion {
pub fn code_region(
counter: Counter,
file_id: u32,
line_start: u32,
column_start: u32,
line_end: u32,
column_end: u32,
) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
&mut *(self.raw as *mut _),
index,
file_id,
line_start,
column_start,
line_end,
column_end,
)
start_line: u32,
start_col: u32,
end_line: u32,
end_col: u32,
) -> Self {
Self {
counter,
file_id,
expanded_file_id: 0,
start_line,
start_col,
end_line,
end_col,
kind: RegionKind::CodeRegion,
}
}
}
impl Drop for SmallVectorCounterMappingRegion<'a> {
fn drop(&mut self) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
&mut *(self.raw as *mut _),
);
/// Constructs an `ExpansionRegion`: a region that associates a source range
/// with the expansion of a virtual source file (e.g. a macro instantiation).
/// Expansion regions carry no counter of their own, so `counter` is zero.
pub fn expansion_region(
    file_id: u32,
    expanded_file_id: u32,
    start_line: u32,
    start_col: u32,
    end_line: u32,
    end_col: u32,
) -> Self {
    Self {
        kind: RegionKind::ExpansionRegion,
        counter: Counter::zero(),
        file_id,
        expanded_file_id,
        start_line,
        start_col,
        end_line,
        end_col,
    }
}
/// Constructs a `SkippedRegion`: a source range whose code was skipped (for
/// example by a preprocessor or similar means). Skipped regions carry no
/// counter, so `counter` is zero and there is no expanded file.
pub fn skipped_region(
    file_id: u32,
    start_line: u32,
    start_col: u32,
    end_line: u32,
    end_col: u32,
) -> Self {
    Self {
        kind: RegionKind::SkippedRegion,
        counter: Counter::zero(),
        file_id,
        expanded_file_id: 0,
        start_line,
        start_col,
        end_line,
        end_col,
    }
}
/// Constructs a `GapRegion`: like a code region, but its count is only used
/// as the line execution count when it is the only region on the line. The
/// high bit of `end_col` marks the mapping region as a gap area, per the
/// LLVM Coverage Mapping format.
pub fn gap_region(
    counter: Counter,
    file_id: u32,
    start_line: u32,
    start_col: u32,
    end_line: u32,
    end_col: u32,
) -> Self {
    Self {
        counter,
        file_id,
        expanded_file_id: 0,
        start_line,
        start_col,
        end_line,
        // Setting the high bit flags this region as a gap area.
        end_col: (1_u32 << 31) | end_col,
        kind: RegionKind::GapRegion,
    }
}
}
@ -218,8 +266,8 @@ pub(crate) fn write_filenames_section_to_buffer(filenames: &Vec<CString>, buffer
pub(crate) fn write_mapping_to_buffer(
virtual_file_mapping: Vec<u32>,
expressions: SmallVectorCounterExpression<'_>,
mapping_regions: SmallVectorCounterMappingRegion<'_>,
expressions: Vec<CounterExpression>,
mut mapping_regions: Vec<CounterMappingRegion>,
buffer: &RustString,
) {
unsafe {
@ -227,7 +275,9 @@ pub(crate) fn write_mapping_to_buffer(
virtual_file_mapping.as_ptr(),
virtual_file_mapping.len() as c_uint,
expressions.as_ptr(),
mapping_regions.as_ptr(),
expressions.len() as c_uint,
mapping_regions.as_mut_ptr(),
mapping_regions.len() as c_uint,
buffer,
);
}

View File

@ -13,7 +13,7 @@ use rustc_ast::ast;
use rustc_codegen_ssa::base::{compare_simd_types, to_immediate, wants_msvc_seh};
use rustc_codegen_ssa::common::span_invalid_monomorphization_error;
use rustc_codegen_ssa::common::{IntPredicate, TypeKind};
use rustc_codegen_ssa::coverageinfo::CounterOp;
use rustc_codegen_ssa::coverageinfo::ExprKind;
use rustc_codegen_ssa::glue;
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
@ -101,7 +101,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
self.add_counter_region(
caller_instance,
op_to_u64(&args[FUNCTION_SOURCE_HASH]),
op_to_u32(&args[COUNTER_INDEX]),
op_to_u32(&args[COUNTER_ID]),
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
@ -111,14 +111,14 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
use coverage::coverage_counter_expression_args::*;
self.add_counter_expression_region(
caller_instance,
op_to_u32(&args[COUNTER_EXPRESSION_INDEX]),
op_to_u32(&args[LEFT_INDEX]),
op_to_u32(&args[EXPRESSION_ID]),
op_to_u32(&args[LEFT_ID]),
if intrinsic == sym::coverage_counter_add {
CounterOp::Add
ExprKind::Add
} else {
CounterOp::Subtract
ExprKind::Subtract
},
op_to_u32(&args[RIGHT_INDEX]),
op_to_u32(&args[RIGHT_ID]),
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
@ -219,7 +219,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
let num_counters = self.const_u32(coverageinfo.num_counters);
use coverage::count_code_region_args::*;
let hash = args[FUNCTION_SOURCE_HASH].immediate();
let index = args[COUNTER_INDEX].immediate();
let index = args[COUNTER_ID].immediate();
debug!(
"translating Rust intrinsic `count_code_region()` to LLVM intrinsic: \
instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})",

View File

@ -1,7 +1,7 @@
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
use super::coverageinfo::{SmallVectorCounterExpression, SmallVectorCounterMappingRegion};
use crate::coverageinfo::CounterMappingRegion;
use super::debuginfo::{
DIArray, DIBasicType, DIBuilder, DICompositeType, DIDerivedType, DIDescriptor, DIEnumerator,
@ -652,16 +652,6 @@ pub struct Linker<'a>(InvariantOpaque<'a>);
pub type DiagnosticHandler = unsafe extern "C" fn(&DiagnosticInfo, *mut c_void);
pub type InlineAsmDiagHandler = unsafe extern "C" fn(&SMDiagnostic, *const c_void, c_uint);
pub mod coverageinfo {
use super::InvariantOpaque;
#[repr(C)]
pub struct SmallVectorCounterExpression<'a>(InvariantOpaque<'a>);
#[repr(C)]
pub struct SmallVectorCounterMappingRegion<'a>(InvariantOpaque<'a>);
}
pub mod debuginfo {
use super::{InvariantOpaque, Metadata};
use bitflags::bitflags;
@ -1645,33 +1635,6 @@ extern "C" {
ConstraintsLen: size_t,
) -> bool;
pub fn LLVMRustCoverageSmallVectorCounterExpressionCreate()
-> &'a mut SmallVectorCounterExpression<'a>;
pub fn LLVMRustCoverageSmallVectorCounterExpressionDispose(
Container: &'a mut SmallVectorCounterExpression<'a>,
);
pub fn LLVMRustCoverageSmallVectorCounterExpressionAdd(
Container: &mut SmallVectorCounterExpression<'a>,
Kind: rustc_codegen_ssa::coverageinfo::CounterOp,
LeftIndex: c_uint,
RightIndex: c_uint,
);
pub fn LLVMRustCoverageSmallVectorCounterMappingRegionCreate()
-> &'a mut SmallVectorCounterMappingRegion<'a>;
pub fn LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
Container: &'a mut SmallVectorCounterMappingRegion<'a>,
);
pub fn LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
Container: &mut SmallVectorCounterMappingRegion<'a>,
Index: c_uint,
FileID: c_uint,
LineStart: c_uint,
ColumnStart: c_uint,
LineEnd: c_uint,
ColumnEnd: c_uint,
);
#[allow(improper_ctypes)]
pub fn LLVMRustCoverageWriteFilenamesSectionToBuffer(
Filenames: *const *const c_char,
@ -1683,8 +1646,10 @@ extern "C" {
pub fn LLVMRustCoverageWriteMappingToBuffer(
VirtualFileMappingIDs: *const c_uint,
NumVirtualFileMappingIDs: c_uint,
Expressions: *const SmallVectorCounterExpression<'_>,
MappingRegions: *const SmallVectorCounterMappingRegion<'_>,
Expressions: *const rustc_codegen_ssa::coverageinfo::map::CounterExpression,
NumExpressions: c_uint,
MappingRegions: *mut CounterMappingRegion,
NumMappingRegions: c_uint,
BufferOut: &RustString,
);

View File

@ -1,289 +1,376 @@
use rustc_data_structures::sync::Lrc;
use rustc_middle::mir;
use rustc_span::source_map::{Pos, SourceFile, SourceMap};
use rustc_span::{BytePos, FileName, RealFileName};
use rustc_middle::ty::Instance;
use rustc_middle::ty::TyCtxt;
use rustc_span::source_map::{Pos, SourceMap};
use rustc_span::{BytePos, FileName, Loc, RealFileName};
use std::cmp::{Ord, Ordering};
use std::collections::BTreeMap;
use std::fmt;
use std::path::PathBuf;
/// Aligns to C++ enum llvm::coverage::Counter::CounterKind.
///
/// Note the order (and therefore the default values) is important. With the
/// attribute `#[repr(C)]`, this enum matches the layout of the corresponding
/// LLVM enum, so the discriminators must not be reordered:
/// `Zero = 0`, `CounterValueReference = 1`, `Expression = 2`.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
enum CounterKind {
    Zero,
    CounterValueReference,
    Expression,
}
/// Rust-side mirror of the C++ struct `llvm::coverage::Counter`. The meaning
/// of `id` depends on the `kind`:
/// * `CounterKind::Zero`: `id` is assumed to be `0`
/// * `CounterKind::CounterValueReference`: `id` matches the `counter_id` of
///   the injected instrumentation counter (the `index` argument to the LLVM
///   intrinsic `instrprof.increment()`)
/// * `CounterKind::Expression`: `id` is the index into the array of counter
///   expressions
/// With `#[repr(C)]`, the field order must not change: it matches the C++
/// layout.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct Counter {
    kind: CounterKind,
    id: u32,
}

impl Counter {
    /// The always-zero counter.
    pub fn zero() -> Self {
        Counter { kind: CounterKind::Zero, id: 0 }
    }

    /// A reference to the injected instrumentation counter with the given ID.
    pub fn counter_value_reference(counter_id: u32) -> Self {
        Counter { kind: CounterKind::CounterValueReference, id: counter_id }
    }

    /// A reference to the counter expression at the given final index.
    pub fn expression(final_expression_index: u32) -> Self {
        Counter { kind: CounterKind::Expression, id: final_expression_index }
    }
}
/// Aligns to C++ enum llvm::coverage::CounterExpression::ExprKind.
/// With `#[repr(C)]`, the order of discriminators is important: it must match
/// the C++ declaration order (`Subtract = 0`, `Add = 1`).
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub enum ExprKind {
Subtract,
Add,
}
/// Aligns to C++ struct llvm::coverage::CounterExpression.
/// The order of fields is important.
#[derive(Copy, Clone, Debug)]
pub enum CoverageKind {
Counter,
CounterExpression(u32, CounterOp, u32),
Unreachable,
/// Rust-side mirror of the C++ struct `llvm::coverage::CounterExpression`,
/// representing `lhs <op> rhs` over two `Counter` operands.
#[repr(C)]
pub struct CounterExpression {
    // With `#[repr(C)]`, the field order must match the C++ layout exactly.
    kind: ExprKind,
    lhs: Counter,
    rhs: Counter,
}

impl CounterExpression {
    /// Builds the expression `lhs <kind> rhs` (e.g. `lhs + rhs` or `lhs - rhs`).
    pub fn new(lhs: Counter, kind: ExprKind, rhs: Counter) -> Self {
        CounterExpression { kind, lhs, rhs }
    }
}
#[derive(Clone, Debug)]
pub struct CoverageRegion {
pub kind: CoverageKind,
pub start_byte_pos: u32,
pub end_byte_pos: u32,
pub struct Region {
// Start and end of the covered source span, as `Loc`s resolved from byte
// positions via `SourceMap::lookup_char_pos` (see `Region::new`, below).
start: Loc,
end: Loc,
}
impl CoverageRegion {
pub fn source_loc(&self, source_map: &SourceMap) -> Option<(Lrc<SourceFile>, CoverageLoc)> {
let (start_file, start_line, start_col) =
lookup_file_line_col(source_map, BytePos::from_u32(self.start_byte_pos));
let (end_file, end_line, end_col) =
lookup_file_line_col(source_map, BytePos::from_u32(self.end_byte_pos));
let start_file_path = match &start_file.name {
FileName::Real(RealFileName::Named(path)) => path,
_ => {
bug!("start_file_path should be a RealFileName, but it was: {:?}", start_file.name)
}
};
let end_file_path = match &end_file.name {
FileName::Real(RealFileName::Named(path)) => path,
_ => bug!("end_file_path should be a RealFileName, but it was: {:?}", end_file.name),
};
if start_file_path == end_file_path {
Some((start_file, CoverageLoc { start_line, start_col, end_line, end_col }))
} else {
None
// FIXME(richkadel): There seems to be a problem computing the file location in
// some cases. I need to investigate this more. When I generate and show coverage
// for the example binary in the crates.io crate `json5format`, I had a couple of
// notable problems:
//
// 1. I saw a lot of coverage spans in `llvm-cov show` highlighting regions in
// various comments (not corresponding to rustdoc code), indicating a possible
// problem with the byte_pos-to-source-map implementation.
//
// 2. And (perhaps not related) when I build the aforementioned example binary with:
// `RUST_FLAGS="-Zinstrument-coverage" cargo build --example formatjson5`
// and then run that binary with
// `LLVM_PROFILE_FILE="formatjson5.profraw" ./target/debug/examples/formatjson5 \
// some.json5` for some reason the binary generates *TWO* `.profraw` files. One
// named `default.profraw` and the other named `formatjson5.profraw` (the expected
// name, in this case).
//
// If the byte range conversion is wrong, fix it. But if it
// is right, then it is possible for the start and end to be in different files.
// Can I do something other than ignore coverages that span multiple files?
//
// If I can resolve this, remove the "Option<>" result type wrapper
// `regions_in_file_order()` accordingly.
}
}
}
impl Default for CoverageRegion {
fn default() -> Self {
Self {
// The default kind (Unreachable) is a placeholder that will be overwritten before
// backend codegen.
kind: CoverageKind::Unreachable,
start_byte_pos: 0,
end_byte_pos: 0,
}
}
}
/// A source code region used with coverage information.
#[derive(Debug, Eq, PartialEq)]
pub struct CoverageLoc {
/// The (1-based) line number of the region start.
pub start_line: u32,
/// The (1-based) column number of the region start.
pub start_col: u32,
/// The (1-based) line number of the region end.
pub end_line: u32,
/// The (1-based) column number of the region end.
pub end_col: u32,
}
// NOTE(review): This span interleaves two versions of the same trait impls — the
// pre-refactor impls for `CoverageLoc` and the post-refactor impls for `Region` —
// with the diff's +/- markers stripped (note the back-to-back `impl` headers and
// the two `cmp` bodies sharing one closing brace). TODO: keep exactly one version
// of each impl before this file can compile.
impl Ord for CoverageLoc {
impl Ord for Region {
// Orders regions by start position, then end position (the `Region` variant also
// orders by file name first, since a `Region` carries its file).
fn cmp(&self, other: &Self) -> Ordering {
(self.start_line, &self.start_col, &self.end_line, &self.end_col).cmp(&(
other.start_line,
&other.start_col,
&other.end_line,
&other.end_col,
))
(&self.start.file.name, &self.start.line, &self.start.col, &self.end.line, &self.end.col)
.cmp(&(
&other.start.file.name,
&other.start.line,
&other.start.col,
&other.end.line,
&other.end.col,
))
}
}
impl PartialOrd for CoverageLoc {
impl PartialOrd for Region {
// Delegates to the total order defined by `Ord::cmp`.
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl fmt::Display for CoverageLoc {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Customize debug format, and repeat the file name, so generated location strings are
// "clickable" in many IDEs.
write!(f, "{}:{} - {}:{}", self.start_line, self.start_col, self.end_line, self.end_col)
// Manual `PartialEq`: compares file name and line/col fields only (not the full
// `Loc` values stored in `start`/`end`).
impl PartialEq for Region {
fn eq(&self, other: &Self) -> bool {
self.start.file.name == other.start.file.name
&& self.start.line == other.start.line
&& self.start.col == other.start.col
&& self.end.line == other.end.line
&& self.end.col == other.end.col
}
}
// NOTE(review): `lookup_file_line_col` (old version) is interleaved with
// `impl Eq for Region` and the new `Display` impl below.
fn lookup_file_line_col(source_map: &SourceMap, byte_pos: BytePos) -> (Lrc<SourceFile>, u32, u32) {
let found = source_map
.lookup_line(byte_pos)
.expect("should find coverage region byte position in source");
let file = found.sf;
let line_pos = file.line_begin_pos(byte_pos);
impl Eq for Region {}
// Use 1-based indexing.
let line = (found.line + 1) as u32;
let col = (byte_pos - line_pos).to_u32() + 1;
(file, line, col)
impl fmt::Display for Region {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let (file_path, start_line, start_col, end_line, end_col) = self.file_start_and_end();
write!(f, "{:?}:{}:{} - {}:{}", file_path, start_line, start_col, end_line, end_col)
}
}
impl Region {
    /// Resolves the given byte positions through the `SourceMap` into a `Region`
    /// holding the start and end `Loc`s.
    ///
    /// Panics (via `assert_eq!`) if the two positions resolve to different source
    /// files; per the FIXME notes elsewhere in this file, cross-file spans have
    /// been observed, so this assertion may need revisiting.
    pub fn new(source_map: &SourceMap, start_byte_pos: u32, end_byte_pos: u32) -> Self {
        let start_loc = source_map.lookup_char_pos(BytePos::from_u32(start_byte_pos));
        let end_loc = source_map.lookup_char_pos(BytePos::from_u32(end_byte_pos));
        assert_eq!(start_loc.file.name, end_loc.file.name);
        Self { start: start_loc, end: end_loc }
    }
    /// Returns the region's file path plus its start/end line and column numbers.
    /// Lines are 1-based as stored; columns are converted from 0-based to 1-based
    /// via the `+ 1`.
    pub fn file_start_and_end<'a>(&'a self) -> (&'a PathBuf, u32, u32, u32, u32) {
        // Only real, named files are expected here; anything else is a compiler bug.
        if let FileName::Real(RealFileName::Named(path)) = &self.start.file.name {
            (
                path,
                self.start.line as u32,
                self.start.col.to_u32() + 1,
                self.end.line as u32,
                self.end.col.to_u32() + 1,
            )
        } else {
            bug!("start.file.name should be a RealFileName, but it was: {:?}", self.start.file.name)
        }
    }
}
/// A counter expression (`lhs op rhs`) together with the source region it covers.
#[derive(Clone, Debug)]
pub struct ExpressionRegion {
// Left operand ID; may reference a counter or another expression (expression IDs
// descend from `u32::MAX` — see `add_counter_expression`).
lhs: u32,
// Whether the expression adds or subtracts its operands.
op: ExprKind,
// Right operand ID, with the same counter-or-expression encoding as `lhs`.
rhs: u32,
// The source region this expression's count applies to.
region: Region,
}
// FIXME(richkadel): There seems to be a problem computing the file location in
// some cases. I need to investigate this more. When I generate and show coverage
// for the example binary in the crates.io crate `json5format`, I had a couple of
// notable problems:
//
// 1. I saw a lot of coverage spans in `llvm-cov show` highlighting regions in
// various comments (not corresponding to rustdoc code), indicating a possible
// problem with the byte_pos-to-source-map implementation.
//
// 2. And (perhaps not related) when I build the aforementioned example binary with:
// `RUSTFLAGS="-Zinstrument-coverage" cargo build --example formatjson5`
// and then run that binary with
// `LLVM_PROFILE_FILE="formatjson5.profraw" ./target/debug/examples/formatjson5 \
// some.json5` for some reason the binary generates *TWO* `.profraw` files. One
// named `default.profraw` and the other named `formatjson5.profraw` (the expected
// name, in this case).
//
// 3. I think that if I eliminate regions within a function, their region_ids,
// referenced in expressions, will be wrong? I think the ids are implied by their
// array position in the final coverage map output (IIRC).
//
// 4. I suspect a problem (if not the only problem) is the SourceMap is wrong for some
// region start/end byte positions. Just like I couldn't get the function hash at
// intrinsic codegen time for external crate functions, I think the SourceMap I
// have here only applies to the local crate, and I know I have coverages that
// reference external crates.
//
// I still don't know if I fixed the hash problem correctly. If external crates
// implement the function, can't I use the coverage counters already compiled
// into those external crates? (Maybe not for generics and/or maybe not for
// macros... not sure. But I need to understand this better.)
//
// If the byte range conversion is wrong, fix it. But if it
// is right, then it is possible for the start and end to be in different files.
// Can I do something other than ignore coverages that span multiple files?
//
// If I can resolve this, remove the "Option<>" result type wrapper
// `regions_in_file_order()` accordingly.
// NOTE(review): This span interleaves the pre- and post-refactor versions of the
// `FunctionCoverage` struct and its mutating methods (a diff with its +/- markers
// stripped): duplicate doc comments, two struct headers, both field sets, and both
// method bodies appear back to back. TODO: keep exactly one version of each item
// before this file can compile.
/// Collects all of the coverage regions associated with (a) injected counters, (b) counter
/// expressions (additions or subtraction), and (c) unreachable regions (always counted as zero),
/// for a given Function. Counters and counter expressions are indexed because they can be operands
/// in an expression. This struct also stores the `function_source_hash`, computed during
/// instrumentation and forwarded with counters.
/// for a given Function. Counters and counter expressions have non-overlapping `id`s because they
/// can both be operands in an expression. This struct also stores the `function_source_hash`,
/// computed during instrumentation, and forwarded with counters.
///
/// Note, it's important to distinguish the `unreachable` region type from what LLVM refers to as
/// a "gap region" (or "gap area"). A gap region is a code region within a counted region (either
/// counter or expression), but the line or lines in the gap region are not executable (such as
/// lines with only whitespace or comments). According to LLVM Code Coverage Mapping documentation,
/// "A count for a gap area is only used as the line execution count if there are no other regions
/// on a line."
pub struct FunctionCoverage {
/// Note, it may be important to understand LLVM's definitions of `unreachable` regions versus "gap
/// regions" (or "gap areas"). A gap region is a code region within a counted region (either counter
/// or expression), but the line or lines in the gap region are not executable (such as lines with
/// only whitespace or comments). According to LLVM Code Coverage Mapping documentation, "A count
/// for a gap area is only used as the line execution count if there are no other regions on a
/// line."
pub struct FunctionCoverage<'a> {
source_map: &'a SourceMap,
source_hash: u64,
counters: Vec<CoverageRegion>,
expressions: Vec<CoverageRegion>,
unreachable: Vec<CoverageRegion>,
translated: bool,
counters: Vec<Option<Region>>,
expressions: Vec<Option<ExpressionRegion>>,
unreachable_regions: Vec<Region>,
}
impl FunctionCoverage {
pub fn with_coverageinfo<'tcx>(coverageinfo: &'tcx mir::CoverageInfo) -> Self {
impl<'a> FunctionCoverage<'a> {
// Constructor: sizes the counter and expression tables from the query-provided
// `coverageinfo` for this function's `DefId`.
pub fn new<'tcx: 'a>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
let coverageinfo = tcx.coverageinfo(instance.def_id());
Self {
source_map: tcx.sess.source_map(),
source_hash: 0, // will be set with the first `add_counter()`
counters: vec![CoverageRegion::default(); coverageinfo.num_counters as usize],
expressions: vec![CoverageRegion::default(); coverageinfo.num_expressions as usize],
unreachable: Vec::new(),
translated: false,
counters: vec![None; coverageinfo.num_counters as usize],
expressions: vec![None; coverageinfo.num_expressions as usize],
unreachable_regions: Vec::new(),
}
}
/// Adds a code region to be counted by an injected counter intrinsic. Return a counter ID
/// for the call.
/// Adds a code region to be counted by an injected counter intrinsic.
/// The source_hash (computed during coverage instrumentation) should also be provided, and
/// should be the same for all counters in a given function.
pub fn add_counter(
&mut self,
source_hash: u64,
index: u32,
id: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
self.source_hash = source_hash;
self.counters[index as usize] =
CoverageRegion { kind: CoverageKind::Counter, start_byte_pos, end_byte_pos };
// (new version) First call records the hash; later calls only debug-assert it matches.
if self.source_hash == 0 {
self.source_hash = source_hash;
} else {
debug_assert_eq!(source_hash, self.source_hash);
}
self.counters[id as usize]
.replace(Region::new(self.source_map, start_byte_pos, end_byte_pos))
.expect_none("add_counter called with duplicate `id`");
}
/// Both counters and "counter expressions" (or simply, "expressions") can be operands in other
/// expressions. Expression IDs start from `u32::MAX` and go down, so the range of expression
/// IDs will not overlap with the range of counter IDs. Counters and expressions can be added in
/// any order, and expressions can still be assigned contiguous (though descending) IDs, without
/// knowing what the last counter ID will be.
///
/// When storing the expression data in the `expressions` vector in the `FunctionCoverage`
/// struct, its vector index is computed, from the given expression ID, by subtracting from
/// `u32::MAX`.
///
/// Since the expression operands (`lhs` and `rhs`) can reference either counters or
/// expressions, an operand that references an expression also uses its original ID, descending
/// from `u32::MAX`. These operands are translated only during code generation, after all
/// counters and expressions have been added.
pub fn add_counter_expression(
&mut self,
translated_index: u32,
id_descending_from_max: u32,
lhs: u32,
op: CounterOp,
op: ExprKind,
rhs: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
let index = u32::MAX - translated_index;
// Counter expressions start with "translated indexes", descending from `u32::MAX`, so
// the range of expression indexes is disjoint from the range of counter indexes. This way,
// both counters and expressions can be operands in other expressions.
//
// Once all counters have been added, the final "region index" for an expression is
// `counters.len() + expression_index` (where `expression_index` is its index in
// `self.expressions`), and the expression operands (`lhs` and `rhs`) can be converted to
// final "region index" references by the same conversion, after subtracting from
// `u32::MAX`.
self.expressions[index as usize] = CoverageRegion {
kind: CoverageKind::CounterExpression(lhs, op, rhs),
start_byte_pos,
end_byte_pos,
};
let expression_index = self.expression_index(id_descending_from_max);
self.expressions[expression_index]
.replace(ExpressionRegion {
lhs,
op,
rhs,
region: Region::new(self.source_map, start_byte_pos, end_byte_pos),
})
.expect_none("add_counter_expression called with duplicate `id_descending_from_max`");
}
pub fn add_unreachable(&mut self, start_byte_pos: u32, end_byte_pos: u32) {
self.unreachable.push(CoverageRegion {
kind: CoverageKind::Unreachable,
start_byte_pos,
end_byte_pos,
});
/// Add a region that will be marked as "unreachable", with a constant "zero counter".
pub fn add_unreachable_region(&mut self, start_byte_pos: u32, end_byte_pos: u32) {
self.unreachable_regions.push(Region::new(self.source_map, start_byte_pos, end_byte_pos));
}
/// Return the source hash, generated from the HIR node structure, and used to indicate whether
/// or not the source code structure changed between different compilations.
// Simple accessor; the hash itself is recorded by `add_counter` (0 until the first call).
pub fn source_hash(&self) -> u64 {
self.source_hash
}
// NOTE(review): As above, this span interleaves the pre- and post-refactor versions
// of the `FunctionCoverage` read-side methods with the diff's +/- markers stripped.
// Old items (`regions`, `regions_in_file_order`, `ensure_expressions_translated`,
// `to_region_index`) are mixed line-by-line with the new ones
// (`get_expressions_and_counter_regions`, `counter_regions`,
// `expressions_with_regions`, `unreachable_regions`, `expression_index`). Note e.g.
// the stray `regions_in_file_order` value at the end of `expressions_with_regions`.
// TODO: keep exactly one version of each item before this file can compile.
fn regions(&'a mut self) -> impl Iterator<Item = &'a CoverageRegion> {
/// Generate an array of CounterExpressions, and an iterator over all `Counter`s and their
/// associated `Regions` (from which the LLVM-specific `CoverageMapGenerator` will create
/// `CounterMappingRegion`s.
pub fn get_expressions_and_counter_regions(
&'a self,
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'a Region)>) {
// A zero hash means `add_counter` was never called for this function.
assert!(self.source_hash != 0);
self.ensure_expressions_translated();
self.counters.iter().chain(self.expressions.iter().chain(self.unreachable.iter()))
let counter_regions = self.counter_regions();
let (expressions, expression_regions) = self.expressions_with_regions();
let unreachable_regions = self.unreachable_regions();
let counter_regions =
counter_regions.chain(expression_regions.into_iter().chain(unreachable_regions));
(expressions, counter_regions)
}
pub fn regions_in_file_order(
&'a mut self,
source_map: &SourceMap,
) -> BTreeMap<PathBuf, BTreeMap<CoverageLoc, (usize, CoverageKind)>> {
let mut regions_in_file_order = BTreeMap::new();
for (region_id, region) in self.regions().enumerate() {
if let Some((source_file, region_loc)) = region.source_loc(source_map) {
// FIXME(richkadel): `region.source_loc()` sometimes fails with two different
// filenames for the start and end byte position. This seems wrong, but for
// now, if encountered, the region is skipped. If resolved, convert the result
// to a non-option value so regions are never skipped.
let real_file_path = match &(*source_file).name {
FileName::Real(RealFileName::Named(path)) => path.clone(),
_ => bug!("coverage mapping expected only real, named files"),
};
let file_coverage_regions =
regions_in_file_order.entry(real_file_path).or_insert_with(|| BTreeMap::new());
file_coverage_regions.insert(region_loc, (region_id, region.kind));
fn counter_regions(&'a self) -> impl Iterator<Item = (Counter, &'a Region)> {
self.counters.iter().enumerate().filter_map(|(index, entry)| {
// Option::map() will return None to filter out missing counters. This may happen
// if, for example, a MIR-instrumented counter is removed during an optimization.
entry.as_ref().map(|region| (Counter::counter_value_reference(index as u32), region))
})
}
// Builds the final, contiguously-indexed `CounterExpression` array (dropping any
// expression whose operands are missing) and the matching region iterator.
fn expressions_with_regions(
&'a self,
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'a Region)>) {
let mut counter_expressions = Vec::with_capacity(self.expressions.len());
let mut expression_regions = Vec::with_capacity(self.expressions.len());
let mut new_indexes = vec![u32::MAX; self.expressions.len()];
// Note that an `ExpressionRegion`s at any given index can include other expressions as
// operands, but expression operands can only come from the subset of expressions having
// `expression_index`s lower than the referencing `ExpressionRegion`. Therefore, it is
// reasonable to look up the new index of an expression operand while the `new_indexes`
// vector is only complete up to the current `ExpressionIndex`.
let id_to_counter = |new_indexes: &Vec<u32>, id| {
if id < self.counters.len() as u32 {
self.counters
.get(id as usize)
.expect("id is out of range")
.as_ref()
.map(|_| Counter::counter_value_reference(id))
} else {
let index = self.expression_index(id);
self.expressions
.get(index)
.expect("id is out of range")
.as_ref()
.map(|_| Counter::expression(new_indexes[index]))
}
};
for (original_index, expression_region) in
self.expressions.iter().enumerate().filter_map(|(original_index, entry)| {
// Option::map() will return None to filter out missing expressions. This may happen
// if, for example, a MIR-instrumented expression is removed during an optimization.
entry.as_ref().map(|region| (original_index, region))
})
{
let region = &expression_region.region;
let ExpressionRegion { lhs, op, rhs, .. } = *expression_region;
if let Some(Some((lhs_counter, rhs_counter))) =
id_to_counter(&new_indexes, lhs).map(|lhs_counter| {
id_to_counter(&new_indexes, rhs).map(|rhs_counter| (lhs_counter, rhs_counter))
})
{
// Both operands exist. `Expression` operands exist in `self.expressions` and have
// been assigned a `new_index`.
let final_expression_index = counter_expressions.len() as u32;
counter_expressions.push(CounterExpression::new(lhs_counter, op, rhs_counter));
new_indexes[original_index] = final_expression_index;
expression_regions.push((Counter::expression(final_expression_index), region));
}
}
regions_in_file_order
(counter_expressions, expression_regions.into_iter())
}
/// A one-time translation of expression operands is needed, for any operands referencing
/// other CounterExpressions. CounterExpression operands get an initial operand ID that is
/// computed by the simple translation: `u32::max - expression_index` because, when created,
/// the total number of Counters is not yet known. This function recomputes region indexes
/// for expressions so they start with the next region index after the last counter index.
fn ensure_expressions_translated(&mut self) {
if !self.translated {
self.translated = true;
let start = self.counters.len() as u32;
assert!(
(start as u64 + self.expressions.len() as u64) < u32::MAX as u64,
"the number of counters and counter expressions in a single function exceeds {}",
u32::MAX
);
for region in self.expressions.iter_mut() {
match region.kind {
CoverageKind::CounterExpression(lhs, op, rhs) => {
let lhs = to_region_index(start, lhs);
let rhs = to_region_index(start, rhs);
region.kind = CoverageKind::CounterExpression(lhs, op, rhs);
}
_ => bug!("expressions must only contain CounterExpression kinds"),
}
}
}
// Unreachable regions are emitted with a constant zero counter.
fn unreachable_regions(&'a self) -> impl Iterator<Item = (Counter, &'a Region)> {
self.unreachable_regions.iter().map(|region| (Counter::zero(), region))
}
// Maps a descending-from-`u32::MAX` expression ID to its `expressions` vector index.
fn expression_index(&self, id_descending_from_max: u32) -> usize {
debug_assert!(id_descending_from_max as usize >= self.counters.len());
(u32::MAX - id_descending_from_max) as usize
}
}
fn to_region_index(start: u32, index: u32) -> u32 {
if index < start { index } else { start + (u32::MAX - index) }
}

View File

@ -1,3 +1,3 @@
pub mod map;
pub use map::CounterOp;
pub use map::ExprKind;

View File

@ -1,5 +1,6 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(bool_to_option)]
#![feature(option_expect_none)]
#![feature(box_patterns)]
#![feature(try_blocks)]
#![feature(in_band_lifetimes)]

View File

@ -1,5 +1,5 @@
use super::BackendTypes;
use crate::coverageinfo::CounterOp;
use crate::coverageinfo::ExprKind;
use rustc_middle::ty::Instance;
pub trait CoverageInfoMethods: BackendTypes {
@ -21,7 +21,7 @@ pub trait CoverageInfoBuilderMethods<'tcx>: BackendTypes {
instance: Instance<'tcx>,
index: u32,
lhs: u32,
op: CounterOp,
op: ExprKind,
rhs: u32,
start_byte_pos: u32,
end_byte_pos: u32,

View File

@ -3,7 +3,7 @@
/// Positional arguments to `libcore::count_code_region()`
pub mod count_code_region_args {
pub const FUNCTION_SOURCE_HASH: usize = 0;
pub const COUNTER_INDEX: usize = 1;
pub const COUNTER_ID: usize = 1;
pub const START_BYTE_POS: usize = 2;
pub const END_BYTE_POS: usize = 3;
}
@ -11,9 +11,9 @@ pub mod count_code_region_args {
/// Positional arguments to `libcore::coverage_counter_add()` and
/// `libcore::coverage_counter_subtract()`
pub mod coverage_counter_expression_args {
pub const COUNTER_EXPRESSION_INDEX: usize = 0;
pub const LEFT_INDEX: usize = 1;
pub const RIGHT_INDEX: usize = 2;
pub const EXPRESSION_ID: usize = 0;
pub const LEFT_ID: usize = 1;
pub const RIGHT_ID: usize = 2;
pub const START_BYTE_POS: usize = 3;
pub const END_BYTE_POS: usize = 4;
}

View File

@ -58,7 +58,7 @@ fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> Coverage
match func.literal.ty.kind {
FnDef(id, _) if id == count_code_region_fn => {
let index_arg =
args.get(count_code_region_args::COUNTER_INDEX).expect("arg found");
args.get(count_code_region_args::COUNTER_ID).expect("arg found");
let counter_index = mir::Operand::scalar_from_const(index_arg)
.to_u32()
.expect("index arg is u32");
@ -68,7 +68,7 @@ fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> Coverage
if id == coverage_counter_add_fn || id == coverage_counter_subtract_fn =>
{
let index_arg = args
.get(coverage_counter_expression_args::COUNTER_EXPRESSION_INDEX)
.get(coverage_counter_expression_args::EXPRESSION_ID)
.expect("arg found");
let translated_index = mir::Operand::scalar_from_const(index_arg)
.to_u32()
@ -215,7 +215,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
debug_assert_eq!(FUNCTION_SOURCE_HASH, args.len());
args.push(self.const_u64(function_source_hash, injection_point));
debug_assert_eq!(COUNTER_INDEX, args.len());
debug_assert_eq!(COUNTER_ID, args.len());
args.push(self.const_u32(counter_id, injection_point));
debug_assert_eq!(START_BYTE_POS, args.len());
@ -255,13 +255,13 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
let mut args = Vec::new();
use coverage_counter_expression_args::*;
debug_assert_eq!(COUNTER_EXPRESSION_INDEX, args.len());
debug_assert_eq!(EXPRESSION_ID, args.len());
args.push(self.const_u32(expression_id, injection_point));
debug_assert_eq!(LEFT_INDEX, args.len());
debug_assert_eq!(LEFT_ID, args.len());
args.push(self.const_u32(lhs, injection_point));
debug_assert_eq!(RIGHT_INDEX, args.len());
debug_assert_eq!(RIGHT_ID, args.len());
args.push(self.const_u32(rhs, injection_point));
debug_assert_eq!(START_BYTE_POS, args.len());

View File

@ -343,8 +343,8 @@ fn mir_validated(
&promote_pass,
&simplify::SimplifyCfg::new("qualify-consts"),
// If the `instrument-coverage` option is enabled, analyze the CFG, identify each
// conditional branch, construct a coverage map to be passed to LLVM, and inject counters
// where needed.
// conditional branch, construct a coverage map to be passed to LLVM, and inject
// counters where needed.
&instrument_coverage::InstrumentCoverage,
]],
);

View File

@ -883,7 +883,7 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"instrument the generated code to support LLVM source-based code coverage \
reports (note, the compiler build config must include `profiler = true`, \
and is mutually exclusive with `-C profile-generate`/`-C profile-use`); \
implies `-C link-dead-code` (unless explicitly disabled)` and
implies `-C link-dead-code` (unless explicitly disabled)` and \
`-Z symbol-mangling-version=v0`; and disables/overrides some optimization \
options (default: no)"),
instrument_mcount: bool = (false, parse_bool, [TRACKED],

View File

@ -8,60 +8,6 @@
using namespace llvm;
extern "C" SmallVectorTemplateBase<coverage::CounterExpression>
*LLVMRustCoverageSmallVectorCounterExpressionCreate() {
return new SmallVector<coverage::CounterExpression, 32>();
}
extern "C" void LLVMRustCoverageSmallVectorCounterExpressionDispose(
SmallVectorTemplateBase<coverage::CounterExpression> *Vector) {
delete Vector;
}
extern "C" void LLVMRustCoverageSmallVectorCounterExpressionAdd(
SmallVectorTemplateBase<coverage::CounterExpression> *Expressions,
coverage::CounterExpression::ExprKind Kind,
unsigned LeftIndex,
unsigned RightIndex) {
auto LHS = coverage::Counter::getCounter(LeftIndex);
auto RHS = coverage::Counter::getCounter(RightIndex);
Expressions->push_back(coverage::CounterExpression { Kind, LHS, RHS });
}
extern "C" SmallVectorTemplateBase<coverage::CounterMappingRegion>
*LLVMRustCoverageSmallVectorCounterMappingRegionCreate() {
return new SmallVector<coverage::CounterMappingRegion, 32>();
}
extern "C" void LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
SmallVectorTemplateBase<coverage::CounterMappingRegion> *Vector) {
delete Vector;
}
extern "C" void LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
SmallVectorTemplateBase<coverage::CounterMappingRegion> *MappingRegions,
unsigned Index,
unsigned FileID,
unsigned LineStart,
unsigned ColumnStart,
unsigned LineEnd,
unsigned ColumnEnd) {
auto Counter = coverage::Counter::getCounter(Index);
MappingRegions->push_back(coverage::CounterMappingRegion::makeRegion(
Counter, FileID, LineStart,
ColumnStart, LineEnd, ColumnEnd));
// FIXME(richkadel): As applicable, implement additional CounterMappingRegion types using the
// static method alternatives to `coverage::CounterMappingRegion::makeRegion`:
//
// makeExpansion(unsigned FileID, unsigned ExpandedFileID, unsigned LineStart,
// unsigned ColumnStart, unsigned LineEnd, unsigned ColumnEnd) {
// makeSkipped(unsigned FileID, unsigned LineStart, unsigned ColumnStart,
// unsigned LineEnd, unsigned ColumnEnd) {
// makeGapRegion(Counter Count, unsigned FileID, unsigned LineStart,
// unsigned ColumnStart, unsigned LineEnd, unsigned ColumnEnd) {
}
extern "C" void LLVMRustCoverageWriteFilenamesSectionToBuffer(
const char* const Filenames[],
size_t FilenamesLen,
@ -79,13 +25,15 @@ extern "C" void LLVMRustCoverageWriteFilenamesSectionToBuffer(
extern "C" void LLVMRustCoverageWriteMappingToBuffer(
const unsigned *VirtualFileMappingIDs,
unsigned NumVirtualFileMappingIDs,
const SmallVectorImpl<coverage::CounterExpression> *Expressions,
SmallVectorImpl<coverage::CounterMappingRegion> *MappingRegions,
const coverage::CounterExpression *Expressions,
unsigned NumExpressions,
coverage::CounterMappingRegion *MappingRegions,
unsigned NumMappingRegions,
RustStringRef BufferOut) {
auto CoverageMappingWriter = coverage::CoverageMappingWriter(
makeArrayRef(VirtualFileMappingIDs, NumVirtualFileMappingIDs),
makeArrayRef(*Expressions),
MutableArrayRef<coverage::CounterMappingRegion> { *MappingRegions });
makeArrayRef(VirtualFileMappingIDs, NumVirtualFileMappingIDs),
makeArrayRef(Expressions, NumExpressions),
makeMutableArrayRef(MappingRegions, NumMappingRegions));
RawRustStringOstream OS(BufferOut);
CoverageMappingWriter.write(OS);
}