Skip to content

Commit

Permalink
Generate allocation and release code
Browse files Browse the repository at this point in the history
Instead of allocations and releases calling functions provided by the
runtime library (inko_alloc() and inko_free()), the compiler now
generates the underlying code directly. Depending on the program used,
this can improve performance by 3-5%. While this isn't significant, it's
an easy win and it helps reduce the amount of code provided by the runtime
library.

See #542 for more details.

Changelog: performance
  • Loading branch information
yorickpeterse committed Apr 9, 2024
1 parent 1536f59 commit ecbe1a6
Show file tree
Hide file tree
Showing 13 changed files with 269 additions and 214 deletions.
5 changes: 0 additions & 5 deletions compiler/src/format.rs
Original file line number Diff line number Diff line change
Expand Up @@ -109,11 +109,6 @@ enum Node {
Indent(Vec<Node>),

/// A node of which the width should be reported as zero.
///
/// TODO: this node causes too many weird formatting issues, such as when a
/// ZeroWidth argument is followed by an array. This is because when
/// rendering, we think we have more space than we actually do, so we wrap
/// too late.
ZeroWidth(Box<Node>),

/// Indent the given nodes recursively, but only starting the next line.
Expand Down
54 changes: 54 additions & 0 deletions compiler/src/llvm/builder.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
use crate::llvm::constants::{HEADER_CLASS_INDEX, HEADER_REFS_INDEX};
use crate::llvm::context::Context;
use crate::llvm::module::Module;
use crate::llvm::runtime_function::RuntimeFunction;
use inkwell::basic_block::BasicBlock;
use inkwell::builder;
use inkwell::debug_info::{
Expand All @@ -17,6 +20,7 @@ use inkwell::{
AddressSpace, AtomicOrdering, AtomicRMWBinOp, FloatPredicate, IntPredicate,
};
use std::path::Path;
use types::{ClassId, Database};

/// A wrapper around an LLVM Builder that provides some additional methods.
pub(crate) struct Builder<'ctx> {
Expand Down Expand Up @@ -652,6 +656,13 @@ impl<'ctx> Builder<'ctx> {
.unwrap()
}

/// Returns an `i1` value that is true when `value` is a null pointer.
///
/// This is a thin wrapper around LLVM's `build_is_null` (an
/// `icmp eq <ptr>, null`); the name argument is left empty so LLVM picks
/// an automatic register name.
pub(crate) fn pointer_is_null(
&self,
value: PointerValue<'ctx>,
) -> IntValue<'ctx> {
self.inner.build_is_null(value, "").unwrap()
}

pub(crate) fn bitcast<V: BasicValue<'ctx>, T: BasicType<'ctx>>(
&self,
value: V,
Expand Down Expand Up @@ -755,6 +766,49 @@ impl<'ctx> Builder<'ctx> {
pub(crate) fn set_debug_function(&self, function: DISubprogram) {
self.function.set_subprogram(function);
}

/// Generates the code for allocating an instance of `class`, returning a
/// pointer to the new object.
///
/// The memory itself is still obtained by calling the runtime's
/// `Allocate` function; this method inlines the null check and the object
/// header setup that would otherwise be provided by the runtime library.
/// On allocation failure the runtime's `AllocationError` function is
/// called, which is assumed to never return (the block ends in
/// `unreachable`).
pub(crate) fn allocate<'a, 'b>(
&self,
module: &'a mut Module<'b, 'ctx>,
db: &Database,
names: &crate::symbol_names::SymbolNames,
class: ClassId,
) -> PointerValue<'ctx> {
let atomic = class.is_atomic(db);
let name = &names.classes[&class];
// Pointer to the class object, loaded from the module-local global for
// this class. This is both stored in the object header and passed to
// the error handler below.
let global = module.add_class(class, name).as_pointer_value();
let class_ptr = self.load_untyped_pointer(global);
// The size of an instance, as reported by the layout information.
let size = module.layouts.size_of_class(class);
let err_func =
module.runtime_function(RuntimeFunction::AllocationError);
let alloc_func = module.runtime_function(RuntimeFunction::Allocate);
let size = self.u64_literal(size).into();
let res = self.call(alloc_func, &[size]).into_pointer_value();

let err_block = self.add_block();
let ok_block = self.add_block();
let is_null = self.pointer_is_null(res);
let header = module.layouts.header;

self.branch(is_null, err_block, ok_block);

// The block to jump to when the allocation failed.
self.switch_to_block(err_block);
self.call_void(err_func, &[class_ptr.into()]);
self.unreachable();

// The block to jump to when the allocation succeeds. All code generated
// after this method returns continues in this block.
self.switch_to_block(ok_block);

// Atomic values start with a reference count of 1, so an atomic
// decrement returns the correct result for a value for which no extra
// references have been created (instead of underflowing).
let refs = self.u32_literal(if atomic { 1 } else { 0 });

// Initialize the object header: the class pointer and the starting
// reference count.
self.store_field(header, res, HEADER_CLASS_INDEX, class_ptr);
self.store_field(header, res, HEADER_REFS_INDEX, refs);
res
}
}

/// A wrapper around the LLVM types used for building debugging information.
Expand Down
15 changes: 13 additions & 2 deletions compiler/src/llvm/layouts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ use inkwell::types::{
};
use inkwell::AddressSpace;
use types::{
CallConvention, BOOL_ID, BYTE_ARRAY_ID, FLOAT_ID, INT_ID, NIL_ID, STRING_ID,
CallConvention, ClassId, BOOL_ID, BYTE_ARRAY_ID, FLOAT_ID, INT_ID, NIL_ID,
STRING_ID,
};

/// The size of an object header.
Expand All @@ -31,6 +32,8 @@ pub(crate) struct Method<'ctx> {

/// Types and layout information to expose to all modules.
pub(crate) struct Layouts<'ctx> {
pub(crate) target_data: &'ctx TargetData,

/// The layout of an empty class.
///
/// This is used for generating dynamic dispatch code, as we don't know the
Expand Down Expand Up @@ -80,7 +83,7 @@ impl<'ctx> Layouts<'ctx> {
state: &State,
mir: &Mir,
context: &'ctx Context,
target_data: TargetData,
target_data: &'ctx TargetData,
) -> Self {
let db = &state.db;
let space = AddressSpace::default();
Expand Down Expand Up @@ -179,6 +182,7 @@ impl<'ctx> Layouts<'ctx> {
};

let mut layouts = Self {
target_data,
empty_class: context.class_type(method),
method,
classes,
Expand Down Expand Up @@ -408,4 +412,11 @@ impl<'ctx> Layouts<'ctx> {

layouts
}

/// Returns the size (in bytes) of an instance of the given class,
/// including any alignment padding.
pub(crate) fn size_of_class(&self, class: ClassId) -> u64 {
let layout = &self.instances[class.0 as usize];

// Use the ABI allocation size (in bytes) directly. The previous
// approach of dividing `get_bit_size()` by the pointer size in bytes
// only works on targets with 8-byte pointers, where the divisor
// happens to equal the number of bits per byte; on e.g. a 32-bit
// target it would report twice the real size.
self.target_data.get_abi_size(layout)
}
}
Loading

0 comments on commit ecbe1a6

Please sign in to comment.