Open Model Railroad Network (OpenMRN)
Loading...
Searching...
No Matches
logging_malloc.cxx
1#if !defined (__EMSCRIPTEN__) && !defined (__MACH__)
2
3#include <unwind.h>
4#include "os/os.h"
5#include "utils/Atomic.hxx"
6
7#define MAX_STRACE 20
8#define MIN_SIZE_TRACED 64
9
10extern "C" {
11extern void *stacktrace[MAX_STRACE];
12extern int strace_len;
13void call_unwind(void);
14}
15
/// Linked list entry type for a call-stack backtrace observed at an
/// allocation site. The backtrace payload (strace_len words) is stored
/// immediately after this struct in the same allocation.
struct trace
{
    /// For quick comparison of traces.
    unsigned hash : 24;
    /// Number of entries in the trace.
    unsigned len : 8;
    /// Link to the next trace entry.
    struct trace *next;
    /// total memory (bytes) allocated via this trace.
    unsigned total_size;
};
28
/// Head of the singly linked list of all distinct backtraces recorded so far
/// (new entries are pushed at the front by add_new_trace).
struct trace *all_traces = nullptr;

/// Computes a 24-bit hash over a backtrace buffer.
///
/// @param len number of words in the buffer.
/// @param buf pointer to the backtrace words to hash.
/// @return hash of the buffer contents, truncated to 24 bits.
unsigned hash_trace(unsigned len, unsigned *buf)
{
    unsigned h = 0;
    unsigned i = 0;
    while (i < len)
    {
        // Multiply-and-xor mixing; the multiplier is 21 (== 1 + 4 + 16).
        h *= 21u;
        h ^= buf[i];
        ++i;
    }
    return h & 0xFFFFFF;
}
41
42struct trace *find_current_trace(unsigned hash)
43{
44 for (struct trace *t = all_traces; t; t = t->next)
45 {
46 if (t->hash != (hash & 0xFFFFFF))
47 continue;
48 if (t->len != strace_len)
49 continue;
50 unsigned *payload = (unsigned *)(t + 1);
51 if (memcmp(payload, stacktrace, strace_len * sizeof(stacktrace[0])) !=
52 0)
53 continue;
54 return t;
55 }
56 return nullptr;
57}
58
59extern "C" {
60extern void *__wrap_malloc(size_t size);
61extern void *__real_malloc(size_t size);
62void* usb_malloc(unsigned long length);
63}
64
65struct trace *add_new_trace(unsigned hash)
66{
67 unsigned total_size = sizeof(struct trace) + strace_len * sizeof(stacktrace[0]);
68#if defined(TARGET_LPC2368) || defined(TARGET_LPC1768)
69 struct trace* t = (struct trace*)usb_malloc(total_size);
70#else
71 struct trace* t = (struct trace*)__real_malloc(total_size);
72#endif
73 memcpy(t + 1, stacktrace, strace_len * sizeof(stacktrace[0]));
74 t->hash = hash;
75 t->len = strace_len;
76 t->total_size = 0;
77 t->next = all_traces;
78 all_traces = t;
79 return t;
80}
81static Atomic* get_lock() {
82 static Atomic lock;
83 return &lock;
84}
85
/// Buffer holding the instruction pointers of the backtrace being collected.
void *stacktrace[MAX_STRACE];
/// Number of valid entries currently in stacktrace.
int strace_len;
88
89_Unwind_Reason_Code trace_func(struct _Unwind_Context *context, void *arg)
90{
91 void *ip = (void *)_Unwind_GetIP(context);
92 if (strace_len > 0 && stacktrace[strace_len - 1] == ip)
93 {
94 return _URC_END_OF_STACK;
95 }
96 if (strace_len >= MAX_STRACE)
97 {
98 return _URC_END_OF_STACK;
99 }
100 stacktrace[strace_len++] = ip;
101 return _URC_NO_REASON;
102}
103
104void *__wrap_malloc(size_t size)
105{
106 if (size < MIN_SIZE_TRACED) {
107 return __real_malloc(size);
108 }
109 uintptr_t saved_lr = 0;
110#if defined(TARGET_LPC2368) || defined(TARGET_LPC1768) || defined(GCC_ARMCM3)
111 asm volatile ("mov %0, lr \n" : "=r" (saved_lr));
112#endif
113 {
114 AtomicHolder holder(get_lock());
115 strace_len = 0;
116 _Unwind_Backtrace(&trace_func, 0);
117 if (strace_len == 1) {
118 stacktrace[strace_len++] = (void*)saved_lr;
119 }
120 unsigned h = hash_trace(strace_len, (unsigned *)stacktrace);
121 struct trace *t = find_current_trace(h);
122 if (!t)
123 {
124 t = add_new_trace(h);
125 }
126 t->total_size += size;
127 }
128 return __real_malloc(size);
129}
130
131#endif // __EMSCRIPTEN__
See OSMutexLock in os/OS.hxx.
Definition Atomic.hxx:153
Lightweight locking class for protecting small critical sections.
Definition Atomic.hxx:130
void * usb_malloc(unsigned long length)
Custom malloc function for USB space.
Linked list entry type for a call-stack backtrace.
unsigned hash
For quick comparison of traces.
unsigned total_size
total memory (bytes) allocated via this trace.
unsigned len
Number of entries in the trace.
struct trace * next
Link to the next trace entry.