Skip to content

[libc][NFC] Remove template arguments from Block #117386

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Nov 22, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
155 changes: 59 additions & 96 deletions libc/src/__support/block.h
Original file line number Diff line number Diff line change
Expand Up @@ -68,13 +68,11 @@ using cpp::optional;
/// The blocks store their offsets to the previous and next blocks. The latter
/// is also the block's size.
///
/// The `ALIGNMENT` constant provided by the derived block is typically the
/// minimum value of `alignof(OffsetType)`. Blocks will always be aligned to a
/// `ALIGNMENT` boundary. Block sizes will always be rounded up to a multiple of
/// `ALIGNMENT`.
/// Blocks will always be aligned to an `ALIGNMENT` boundary. Block sizes will
/// always be rounded up to a multiple of `ALIGNMENT`.
///
/// As an example, the diagram below represents two contiguous
/// `Block<uint32_t, 8>`s. The indices indicate byte offsets:
/// As an example, the diagram below represents two contiguous `Block`s. The
/// indices indicate byte offsets:
///
/// @code{.unparsed}
/// Block 1:
Expand Down Expand Up @@ -117,30 +115,15 @@ using cpp::optional;
///
/// The next offset of a block matches the previous offset of its next block.
/// The first block in a list is denoted by having a previous offset of `0`.
///
/// @tparam OffsetType Unsigned integral type used to encode offsets. Larger
/// types can address more memory, but consume greater
/// overhead.
/// @tparam kAlign Sets the overall alignment for blocks. Minimum is
/// `alignof(OffsetType)`, but the default is max_align_t,
/// since the usable space will then already be
/// aligned to max_align_t if the size of OffsetType is no
/// less than half of max_align_t. Larger values cause
/// greater overhead.
template <typename OffsetType = uintptr_t, size_t kAlign = alignof(max_align_t)>
class Block {
// Masks for the contents of the next_ field.
static constexpr size_t PREV_FREE_MASK = 1 << 0;
static constexpr size_t LAST_MASK = 1 << 1;
static constexpr size_t SIZE_MASK = ~(PREV_FREE_MASK | LAST_MASK);

public:
using offset_type = OffsetType;
static_assert(cpp::is_unsigned_v<offset_type>,
"offset type must be unsigned");
static constexpr size_t ALIGNMENT =
cpp::max(cpp::max(kAlign, alignof(offset_type)), size_t{4});
static constexpr size_t BLOCK_OVERHEAD = align_up(sizeof(Block), ALIGNMENT);
static constexpr size_t ALIGNMENT = cpp::max(alignof(max_align_t), size_t{4});
static const size_t BLOCK_OVERHEAD;

// No copy or move.
Block(const Block &other) = delete;
Expand All @@ -157,61 +140,61 @@ class Block {
///
/// @warning This method does not do any checking; passing a random
/// pointer will return a non-null pointer.
static Block *from_usable_space(void *usable_space) {
LIBC_INLINE static Block *from_usable_space(void *usable_space) {
auto *bytes = reinterpret_cast<cpp::byte *>(usable_space);
return reinterpret_cast<Block *>(bytes - BLOCK_OVERHEAD);
}
static const Block *from_usable_space(const void *usable_space) {
LIBC_INLINE static const Block *from_usable_space(const void *usable_space) {
const auto *bytes = reinterpret_cast<const cpp::byte *>(usable_space);
return reinterpret_cast<const Block *>(bytes - BLOCK_OVERHEAD);
}

/// @returns The total size of the block in bytes, including the header.
size_t outer_size() const { return next_ & SIZE_MASK; }
LIBC_INLINE size_t outer_size() const { return next_ & SIZE_MASK; }

static size_t outer_size(size_t inner_size) {
LIBC_INLINE static size_t outer_size(size_t inner_size) {
// The usable region includes the prev_ field of the next block.
return inner_size - sizeof(prev_) + BLOCK_OVERHEAD;
}

/// @returns The number of usable bytes inside the block were it to be
/// allocated.
size_t inner_size() const {
LIBC_INLINE size_t inner_size() const {
if (!next())
return 0;
return inner_size(outer_size());
}

/// @returns The number of usable bytes inside a block with the given outer
/// size were it to be allocated.
static size_t inner_size(size_t outer_size) {
LIBC_INLINE static size_t inner_size(size_t outer_size) {
// The usable region includes the prev_ field of the next block.
return inner_size_free(outer_size) + sizeof(prev_);
}

/// @returns The number of usable bytes inside the block if it remains free.
size_t inner_size_free() const {
LIBC_INLINE size_t inner_size_free() const {
if (!next())
return 0;
return inner_size_free(outer_size());
}

/// @returns The number of usable bytes inside a block with the given outer
/// size if it remains free.
static size_t inner_size_free(size_t outer_size) {
LIBC_INLINE static size_t inner_size_free(size_t outer_size) {
return outer_size - BLOCK_OVERHEAD;
}

/// @returns A pointer to the usable space inside this block.
cpp::byte *usable_space() {
LIBC_INLINE cpp::byte *usable_space() {
return reinterpret_cast<cpp::byte *>(this) + BLOCK_OVERHEAD;
}
const cpp::byte *usable_space() const {
LIBC_INLINE const cpp::byte *usable_space() const {
return reinterpret_cast<const cpp::byte *>(this) + BLOCK_OVERHEAD;
}

// @returns The region of memory the block manages, including the header.
ByteSpan region() {
LIBC_INLINE ByteSpan region() {
return {reinterpret_cast<cpp::byte *>(this), outer_size()};
}

Expand All @@ -229,42 +212,53 @@ class Block {

/// @returns The block immediately after this one, or a null pointer if this
/// is the last block.
Block *next() const;
LIBC_INLINE Block *next() const {
if (next_ & LAST_MASK)
return nullptr;
return reinterpret_cast<Block *>(reinterpret_cast<uintptr_t>(this) +
outer_size());
}

/// @returns The free block immediately before this one, otherwise nullptr.
Block *prev_free() const;
LIBC_INLINE Block *prev_free() const {
if (!(next_ & PREV_FREE_MASK))
return nullptr;
return reinterpret_cast<Block *>(reinterpret_cast<uintptr_t>(this) - prev_);
}

/// @returns Whether the block is unavailable for allocation.
bool used() const { return !next() || !next()->prev_free(); }
LIBC_INLINE bool used() const { return !next() || !next()->prev_free(); }

/// Marks this block as in use.
void mark_used() {
LIBC_INLINE void mark_used() {
LIBC_ASSERT(next() && "last block is always considered used");
next()->next_ &= ~PREV_FREE_MASK;
}

/// Marks this block as free.
void mark_free() {
LIBC_INLINE void mark_free() {
LIBC_ASSERT(next() && "last block is always considered used");
next()->next_ |= PREV_FREE_MASK;
// The next block's prev_ field becomes alive, as it is no longer part of
// this block's used space.
*new (&next()->prev_) offset_type = outer_size();
*new (&next()->prev_) size_t = outer_size();
}

/// Marks this block as the last one in the chain. Makes next() return
/// nullptr.
void mark_last() { next_ |= LAST_MASK; }
LIBC_INLINE void mark_last() { next_ |= LAST_MASK; }

constexpr Block(size_t outer_size);
LIBC_INLINE constexpr Block(size_t outer_size) : next_(outer_size) {
LIBC_ASSERT(outer_size % ALIGNMENT == 0 && "block sizes must be aligned");
}

bool is_usable_space_aligned(size_t alignment) const {
LIBC_INLINE bool is_usable_space_aligned(size_t alignment) const {
return reinterpret_cast<uintptr_t>(usable_space()) % alignment == 0;
}

/// @returns The new inner size of this block that would give the usable
/// space of the next block the given alignment.
size_t padding_for_alignment(size_t alignment) const {
LIBC_INLINE size_t padding_for_alignment(size_t alignment) const {
if (is_usable_space_aligned(alignment))
return 0;

Expand Down Expand Up @@ -322,7 +316,9 @@ class Block {
private:
/// Construct a block to represent a span of bytes. Overwrites only enough
/// memory for the block header; the rest of the span is left alone.
static Block *as_block(ByteSpan bytes);
LIBC_INLINE static Block *as_block(ByteSpan bytes) {
return ::new (bytes.data()) Block(bytes.size());
}

/// Like `split`, but assumes the caller has already checked the parameters to
/// ensure the split will succeed.
Expand All @@ -332,11 +328,11 @@ class Block {
/// block. This field is only alive when the previous block is free;
/// otherwise, its memory is reused as part of the previous block's usable
/// space.
offset_type prev_ = 0;
size_t prev_ = 0;

/// Offset from this block to the next block. Valid even if this is the last
/// block, since it equals the size of the block.
offset_type next_ = 0;
size_t next_ = 0;

/// Information about the current state of the block is stored in the two low
/// order bits of the next_ value. These are guaranteed free by a minimum
Expand All @@ -347,9 +343,10 @@ class Block {
/// previous block is free.
/// * If the `last` flag is set, the block is the sentinel last block. It is
/// summarily considered used and has no next block.
} __attribute__((packed, aligned(cpp::max(kAlign, size_t{4}))));
} __attribute__((packed, aligned(cpp::max(alignof(max_align_t), size_t{4}))));

// Public template method implementations.
inline constexpr size_t Block::BLOCK_OVERHEAD =
align_up(sizeof(Block), ALIGNMENT);

LIBC_INLINE ByteSpan get_aligned_subspan(ByteSpan bytes, size_t alignment) {
if (bytes.data() == nullptr)
Expand All @@ -367,9 +364,8 @@ LIBC_INLINE ByteSpan get_aligned_subspan(ByteSpan bytes, size_t alignment) {
aligned_end - aligned_start);
}

template <typename OffsetType, size_t kAlign>
optional<Block<OffsetType, kAlign> *>
Block<OffsetType, kAlign>::init(ByteSpan region) {
LIBC_INLINE
optional<Block *> Block::init(ByteSpan region) {
optional<ByteSpan> result = get_aligned_subspan(region, ALIGNMENT);
if (!result)
return {};
Expand All @@ -379,7 +375,7 @@ Block<OffsetType, kAlign>::init(ByteSpan region) {
if (region.size() < 2 * BLOCK_OVERHEAD)
return {};

if (cpp::numeric_limits<OffsetType>::max() < region.size())
if (cpp::numeric_limits<size_t>::max() < region.size())
return {};

Block *block = as_block(region.first(region.size() - BLOCK_OVERHEAD));
Expand All @@ -389,9 +385,8 @@ Block<OffsetType, kAlign>::init(ByteSpan region) {
return block;
}

template <typename OffsetType, size_t kAlign>
bool Block<OffsetType, kAlign>::can_allocate(size_t alignment,
size_t size) const {
LIBC_INLINE
bool Block::can_allocate(size_t alignment, size_t size) const {
if (inner_size() < size)
return false;
if (is_usable_space_aligned(alignment))
Expand All @@ -406,10 +401,8 @@ bool Block<OffsetType, kAlign>::can_allocate(size_t alignment,
return size <= aligned_inner_size;
}

template <typename OffsetType, size_t kAlign>
typename Block<OffsetType, kAlign>::BlockInfo
Block<OffsetType, kAlign>::allocate(Block *block, size_t alignment,
size_t size) {
LIBC_INLINE
Block::BlockInfo Block::allocate(Block *block, size_t alignment, size_t size) {
LIBC_ASSERT(
block->can_allocate(alignment, size) &&
"Calls to this function for a given alignment and size should only be "
Expand Down Expand Up @@ -447,9 +440,8 @@ Block<OffsetType, kAlign>::allocate(Block *block, size_t alignment,
return info;
}

template <typename OffsetType, size_t kAlign>
optional<Block<OffsetType, kAlign> *>
Block<OffsetType, kAlign>::split(size_t new_inner_size) {
LIBC_INLINE
optional<Block *> Block::split(size_t new_inner_size) {
if (used())
return {};
// The prev_ field of the next block is always available, so there is a
Expand All @@ -469,9 +461,8 @@ Block<OffsetType, kAlign>::split(size_t new_inner_size) {
return split_impl(new_inner_size);
}

template <typename OffsetType, size_t kAlign>
Block<OffsetType, kAlign> *
Block<OffsetType, kAlign>::split_impl(size_t new_inner_size) {
LIBC_INLINE
Block *Block::split_impl(size_t new_inner_size) {
size_t outer_size1 = outer_size(new_inner_size);
LIBC_ASSERT(outer_size1 % ALIGNMENT == 0 && "new size must be aligned");
ByteSpan new_region = region().subspan(outer_size1);
Expand All @@ -484,8 +475,8 @@ Block<OffsetType, kAlign>::split_impl(size_t new_inner_size) {
return new_block;
}

template <typename OffsetType, size_t kAlign>
bool Block<OffsetType, kAlign>::merge_next() {
LIBC_INLINE
bool Block::merge_next() {
if (used() || next()->used())
return false;
size_t new_size = outer_size() + next()->outer_size();
Expand All @@ -495,34 +486,6 @@ bool Block<OffsetType, kAlign>::merge_next() {
return true;
}

template <typename OffsetType, size_t kAlign>
Block<OffsetType, kAlign> *Block<OffsetType, kAlign>::next() const {
if (next_ & LAST_MASK)
return nullptr;
return reinterpret_cast<Block *>(reinterpret_cast<uintptr_t>(this) +
outer_size());
}

template <typename OffsetType, size_t kAlign>
Block<OffsetType, kAlign> *Block<OffsetType, kAlign>::prev_free() const {
if (!(next_ & PREV_FREE_MASK))
return nullptr;
return reinterpret_cast<Block *>(reinterpret_cast<uintptr_t>(this) - prev_);
}

// Private template method implementations.

template <typename OffsetType, size_t kAlign>
constexpr Block<OffsetType, kAlign>::Block(size_t outer_size)
: next_(outer_size) {
LIBC_ASSERT(outer_size % ALIGNMENT == 0 && "block sizes must be aligned");
}

template <typename OffsetType, size_t kAlign>
Block<OffsetType, kAlign> *Block<OffsetType, kAlign>::as_block(ByteSpan bytes) {
return ::new (bytes.data()) Block(bytes.size());
}

} // namespace LIBC_NAMESPACE_DECL

#endif // LLVM_LIBC_SRC___SUPPORT_BLOCK_H
2 changes: 1 addition & 1 deletion libc/src/__support/freelist.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ namespace LIBC_NAMESPACE_DECL {

void FreeList::push(Node *node) {
if (begin_) {
LIBC_ASSERT(Block<>::from_usable_space(node)->outer_size() ==
LIBC_ASSERT(Block::from_usable_space(node)->outer_size() ==
begin_->block()->outer_size() &&
"freelist entries must have the same size");
// Since the list is circular, insert the node immediately before begin_.
Expand Down
10 changes: 5 additions & 5 deletions libc/src/__support/freelist.h
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,12 @@ class FreeList {
class Node {
public:
/// @returns The block containing this node.
LIBC_INLINE const Block<> *block() const {
return Block<>::from_usable_space(this);
LIBC_INLINE const Block *block() const {
return Block::from_usable_space(this);
}

/// @returns The block containing this node.
LIBC_INLINE Block<> *block() { return Block<>::from_usable_space(this); }
LIBC_INLINE Block *block() { return Block::from_usable_space(this); }

/// @returns The inner size of blocks in the list containing this node.
LIBC_INLINE size_t size() const { return block()->inner_size(); }
Expand All @@ -58,11 +58,11 @@ class FreeList {
LIBC_INLINE Node *begin() { return begin_; }

/// @returns The first block in the list.
LIBC_INLINE Block<> *front() { return begin_->block(); }
LIBC_INLINE Block *front() { return begin_->block(); }

/// Push a block to the back of the list.
/// The block must be large enough to contain a node.
LIBC_INLINE void push(Block<> *block) {
LIBC_INLINE void push(Block *block) {
LIBC_ASSERT(!block->used() &&
"only free blocks can be placed on free lists");
LIBC_ASSERT(block->inner_size_free() >= sizeof(FreeList) &&
Expand Down
Loading
Loading