2#ifndef LIBSBX_MEMORY_TRACKING_ALLOCATOR_HPP_
3#define LIBSBX_MEMORY_TRACKING_ALLOCATOR_HPP_
#include <algorithm>
#include <array>
#include <atomic>
#include <bit>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <limits>
#include <memory>
#include <mutex>
#include <new>
#include <source_location>
#include <string_view>
#include <unordered_map>

#include <magic_enum/magic_enum.hpp>

#include <libsbx/utility/logger.hpp>
#include <libsbx/utility/enum.hpp>
#include <libsbx/utility/target.hpp>
25namespace sbx::memory {
29#if defined(SBX_MEMORY_TRACKING)
30 inline static constexpr auto value = (SBX_MEMORY_TRACKING != 0);
32 inline static constexpr auto value = utility::is_build_configuration_debug_v;
37inline constexpr auto is_memory_tracking_enabled_v = is_memory_tracking_enabled::value;
/// Logical subsystems an allocation can be attributed to. The underlying value
/// doubles as an index into the per-category statistics array (sized with
/// magic_enum::enum_count), so keep `unknown` as a valid enumerator.
/// NOTE(review): enumerator list reconstructed from the to_string() cases —
/// confirm the order against the original header, since it determines the
/// statistics-array indices.
enum class allocation_category : std::uint8_t {
  general,
  engine,
  graphics,
  physics,
  audio,
  scripting,
  ui,
  ai,
  unknown
}; // enum class allocation_category

/// Returns a human-readable name for a category.
/// Any value outside the named enumerators maps to "Unknown".
constexpr auto to_string(const allocation_category category) noexcept -> std::string_view {
  switch (category) {
    case allocation_category::general:
      return "General";
    case allocation_category::engine:
      return "Engine";
    case allocation_category::graphics:
      return "Graphics";
    case allocation_category::physics:
      return "Physics";
    case allocation_category::audio:
      return "Audio";
    case allocation_category::scripting:
      return "Scripting";
    case allocation_category::ui:
      return "Ui";
    case allocation_category::ai:
      return "Ai";
    case allocation_category::unknown:
    default:
      return "Unknown";
  }
}
// -- Plain (non-atomic) statistics snapshot --------------------------------
// NOTE(review): extraction artifact — the enclosing struct declaration and
// several closing braces were dropped, and the leading numbers below are
// original-file line numbers fused into the text by the exporter. The struct
// name is not recoverable from this view (it is only ever reached through
// snapshot()); confirm against the real header before editing.
// Monotonic totals copied out of the atomic counters; never reset.
68 std::size_t allocation_count{0};
69 std::size_t deallocation_count{0};
70 std::size_t bytes_allocated{0};
71 std::size_t bytes_freed{0};
// High-water mark of concurrently live bytes at snapshot time.
72 std::size_t peak_bytes{0};
// Bytes currently live; relies on bytes_freed <= bytes_allocated
// (unsigned subtraction would otherwise wrap).
74 [[nodiscard]]
auto current_bytes()
const noexcept -> std::size_t {
75 return bytes_allocated - bytes_freed;
// Allocations currently live (allocations minus deallocations).
78 [[nodiscard]]
auto current_allocations()
const noexcept -> std::size_t {
79 return allocation_count - deallocation_count;
// -- allocation_statistics (atomic, per-category) --------------------------
// NOTE(review): extraction artifact — the struct declaration, the snapshot()
// signature and its local `out` were dropped, and the leading numbers are
// original-file line numbers fused into the text. The struct name
// `allocation_statistics` is established by the _statistics array member;
// snapshot()'s return type (the plain snapshot struct above) is not
// recoverable from this view.
// Relaxed atomics: these are statistics, not synchronization, so no ordering
// guarantees are needed beyond per-counter atomicity.
86 std::atomic<std::size_t> allocation_count{0};
87 std::atomic<std::size_t> deallocation_count{0};
88 std::atomic<std::size_t> bytes_allocated{0};
89 std::atomic<std::size_t> bytes_freed{0};
90 std::atomic<std::size_t> peak_bytes{0};
// snapshot() body: copies each counter into the plain struct `out`.
// The copies are individually relaxed, so the snapshot is not guaranteed to
// be a single consistent point in time — acceptable for reporting.
95 out.allocation_count = allocation_count.load(std::memory_order_relaxed);
96 out.deallocation_count = deallocation_count.load(std::memory_order_relaxed);
97 out.bytes_allocated = bytes_allocated.load(std::memory_order_relaxed);
98 out.bytes_freed = bytes_freed.load(std::memory_order_relaxed);
99 out.peak_bytes = peak_bytes.load(std::memory_order_relaxed);
// Bytes currently live (allocated minus freed, both relaxed loads).
104 [[nodiscard]]
auto current_bytes()
const noexcept -> std::size_t {
105 return bytes_allocated.load(std::memory_order_relaxed) - bytes_freed.load(std::memory_order_relaxed);
// Allocations currently live.
108 [[nodiscard]]
auto current_allocations()
const noexcept -> std::size_t {
109 return allocation_count.load(std::memory_order_relaxed) - deallocation_count.load(std::memory_order_relaxed);
116 inline static constexpr auto magic_allocated = std::uint32_t{0xABCD1234};
117 inline static constexpr auto magic_freed = std::uint32_t{0xDEADBEEF};
120 std::size_t alignment;
121 allocation_category category;
122 std::uint32_t source_line;
123 const char* source_file;
140 auto& statistics = _statistics[utility::to_underlying(header->category)];
142 statistics.allocation_count.fetch_add(1, std::memory_order_relaxed);
144 auto old_bytes = statistics.bytes_allocated.fetch_add(header->size, std::memory_order_relaxed);
146 auto current = old_bytes + header->size - statistics.bytes_freed.load(std::memory_order_relaxed);
147 auto peak = statistics.peak_bytes.load(std::memory_order_relaxed);
149 while (current > peak && !statistics.peak_bytes.compare_exchange_weak(peak, current, std::memory_order_relaxed)) { }
152 auto lock = std::lock_guard{_statistics_mutex};
154 header->previous =
nullptr;
155 header->next = _head;
158 _head->previous = header;
166 auto& statistics = _statistics[utility::to_underlying(header->category)];
168 statistics.deallocation_count.fetch_add(1, std::memory_order_relaxed);
169 statistics.bytes_freed.fetch_add(header->size, std::memory_order_relaxed);
172 auto lock = std::lock_guard{_statistics_mutex};
174 if (header->previous) {
175 header->previous->next = header->next;
177 _head = header->next;
180 header->next->previous = header->previous;
// -- memory_tracker statistics accessors (fragments) -----------------------
// NOTE(review): extraction artifact — both method signatures and the
// declaration of the local `total` were dropped, and the leading numbers are
// original-file line numbers fused into the text. Their return type is the
// unnamed plain snapshot struct, so they cannot be safely reconstructed from
// this view.
// statistics_for(category): snapshot of one category's counters.
186 return _statistics[utility::to_underlying(category)].snapshot();
// total_statistics(): sums every category's snapshot; peak_bytes takes the
// max across categories rather than the sum (per-category peaks are not
// simultaneous, so summing would overstate the true peak).
192 for (
const auto& statistics : _statistics) {
193 auto snapshot = statistics.snapshot();
195 total.allocation_count += snapshot.allocation_count;
196 total.deallocation_count += snapshot.deallocation_count;
197 total.bytes_allocated += snapshot.bytes_allocated;
198 total.bytes_freed += snapshot.bytes_freed;
199 total.peak_bytes = std::max(total.peak_bytes, snapshot.peak_bytes);
205 auto report_leaks()
const ->
void {
206 auto lock = std::lock_guard{_statistics_mutex};
212 auto leak_count = std::size_t{0};
213 auto leak_bytes = std::size_t{0};
215 for (
auto* header = _head; header; header = header->next) {
217 leak_bytes += header->size;
219 utility::logger<
"memory">::warn(
"Leak: {} bytes at {}:{}", header->size, header->source_file, header->source_line);
222 utility::logger<
"memory">::warn(
"Memory leaks detected: {} allocations, {} bytes", leak_count, leak_bytes);
237 std::array<allocation_statistics, magic_enum::enum_count<allocation_category>()> _statistics{};
238 mutable std::mutex _statistics_mutex;
246[[nodiscard]]
inline auto aligned_allocate(std::size_t size, std::size_t alignment,
const allocation_category category, std::string_view file, std::int32_t line) ->
void* {
247 auto effective_alignment = alignment;
249 effective_alignment = std::max(effective_alignment,
alignof(
void*));
250 effective_alignment = std::max(effective_alignment,
alignof(std::max_align_t));
252 if (!std::has_single_bit(effective_alignment)) {
253 effective_alignment = std::bit_ceil(effective_alignment);
256 auto overhead =
sizeof(allocation_header) +
sizeof(allocation_header*) + (effective_alignment - 1);
257 auto total_size = size + overhead;
259 auto* base = std::malloc(total_size);
262 throw std::bad_alloc{};
265 auto* base_bytes =
static_cast<std::byte*
>(base);
267 auto* header = std::construct_at(
reinterpret_cast<allocation_header*
>(base_bytes));
269 header->alignment = effective_alignment;
270 header->category = category;
271 header->source_line =
static_cast<std::uint32_t
>(line);
272 header->source_file = file.data();
273 header->previous =
nullptr;
274 header->next =
nullptr;
275 header->magic = allocation_header::magic_allocated;
277 auto* user_unaligned = base_bytes +
sizeof(allocation_header) +
sizeof(allocation_header*);
279 auto user_addr =
reinterpret_cast<std::uintptr_t
>(user_unaligned);
280 auto aligned_addr = (user_addr + effective_alignment - 1) & ~(
static_cast<std::uintptr_t
>(effective_alignment) - 1);
282 auto* user_ptr =
reinterpret_cast<std::byte*
>(aligned_addr);
284 auto* header_slot =
reinterpret_cast<allocation_header**
>(user_ptr -
sizeof(allocation_header*));
285 *header_slot = header;
287 memory_tracker::instance().record_allocation(header);
289 return static_cast<void*
>(user_ptr);
292inline auto aligned_deallocate(
void* ptr)
noexcept ->
void {
297 auto* user_bytes =
static_cast<std::byte*
>(ptr);
298 auto* header_slot =
reinterpret_cast<allocation_header**
>(user_bytes -
sizeof(allocation_header*));
299 auto* header = *header_slot;
305 if (header->magic != allocation_header::magic_allocated) {
309 header->magic = allocation_header::magic_freed;
311 memory_tracker::instance().record_deallocation(header);
313 std::destroy_at(header);
314 std::free(
static_cast<void*
>(header));
319template<
typename Type, allocation_category Category = allocation_category::general>
324 using value_type = Type;
325 using pointer = Type*;
326 using const_pointer =
const Type*;
327 using size_type = std::size_t;
328 using difference_type = std::ptrdiff_t;
330 template<
typename OtherType>
335 using is_always_equal = std::true_type;
336 using propagate_on_container_move_assignment = std::true_type;
340 template<typename OtherType, allocation_category OtherCategory>
343 [[nodiscard]]
auto allocate(size_type size,
const std::source_location& source_location = std::source_location::current()) -> pointer {
344 if (size > std::numeric_limits<size_type>::max() /
sizeof(Type)) {
345 throw std::bad_array_new_length{};
348 return static_cast<pointer
>(detail::aligned_allocate(size *
sizeof(Type),
alignof(Type), Category, source_location.file_name(),
static_cast<std::int32_t
>(source_location.line())));
351 auto deallocate(pointer ptr, size_type)
noexcept ->
void {
352 detail::aligned_deallocate(ptr);
357template<
typename LhsType, allocation_category LhsCategory,
typename RhsType, allocation_category RhsCategory>
358[[nodiscard]]
constexpr auto operator==(
const tracking_allocator<LhsType, LhsCategory>&,
const tracking_allocator<RhsType, RhsCategory>&)
noexcept ->
bool {
362template<
typename Type>
363using general_tracking_allocator = tracking_allocator<Type, allocation_category::general>;
370 allocation_category category;
371 const char* file_name;
375 explicit allocation_scope(allocation_category category,
const std::source_location& source_location = std::source_location::current()) noexcept
376 : _previous(_current) {
377 _current.category = category;
378 _current.file_name = source_location.file_name();
379 _current.line =
static_cast<std::uint32_t
>(source_location.line());
383 _current = _previous;
386 static auto current() noexcept -> const context& {
392 static thread_local context _current;
398#define SBX_MEMORY_SCOPE(category) auto scope_##__LINE__ = sbx::memory::allocation_scope{category, std::source_location::current()}
Definition: tracking_allocator.hpp:365
Definition: tracking_allocator.hpp:130
Definition: tracking_allocator.hpp:320
Definition: logger.hpp:124
Definition: tracking_allocator.hpp:369
Definition: tracking_allocator.hpp:66
Definition: tracking_allocator.hpp:84
Definition: tracking_allocator.hpp:27
Definition: tracking_allocator.hpp:331