Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 8524cba

Browse files
committed Mar 4, 2017
Fully Reformat fallback_malloc.cpp
This patch fully reformats fallback_malloc.cpp. Previously the test was a mess of different styles and indentations. This made it very hard to work in and read. Therefore I felt it was best to re-format the whole thing. Unfortunately this means some history will be lost, but hopefully much of it will still be accessible after ignoring whitespace changes. llvm-svn: 296960
1 parent d6aa0d0 commit 8524cba

File tree

2 files changed

+158
-140
lines changed

2 files changed

+158
-140
lines changed
 

‎libcxxabi/.clang-format

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
BasedOnStyle: LLVM
2+
3+
---
4+
Language: Cpp
5+
6+
AlwaysBreakTemplateDeclarations: true
7+
PointerAlignment: Left
8+
9+
# Disable formatting options which may break tests.
10+
SortIncludes: false
11+
ReflowComments: false
12+
---

‎libcxxabi/src/fallback_malloc.cpp

Lines changed: 146 additions & 140 deletions
Original file line numberDiff line numberDiff line change
@@ -32,216 +32,222 @@ namespace {
3232
_LIBCPP_SAFE_STATIC
3333
static std::__libcpp_mutex_t heap_mutex = _LIBCPP_MUTEX_INITIALIZER;
3434
#else
35-
static void * heap_mutex = 0;
35+
static void* heap_mutex = 0;
3636
#endif
3737

3838
class mutexor {
3939
public:
4040
#ifndef _LIBCXXABI_HAS_NO_THREADS
41-
mutexor ( std::__libcpp_mutex_t *m ) : mtx_(m) {
42-
std::__libcpp_mutex_lock ( mtx_ );
43-
}
44-
~mutexor () { std::__libcpp_mutex_unlock ( mtx_ ); }
41+
mutexor(std::__libcpp_mutex_t* m) : mtx_(m) {
42+
std::__libcpp_mutex_lock(mtx_);
43+
}
44+
~mutexor() { std::__libcpp_mutex_unlock(mtx_); }
4545
#else
46-
mutexor ( void * ) {}
47-
~mutexor () {}
46+
mutexor(void*) {}
47+
~mutexor() {}
4848
#endif
4949
private:
50-
mutexor ( const mutexor &rhs );
51-
mutexor & operator = ( const mutexor &rhs );
50+
mutexor(const mutexor& rhs);
51+
mutexor& operator=(const mutexor& rhs);
5252
#ifndef _LIBCXXABI_HAS_NO_THREADS
53-
std::__libcpp_mutex_t *mtx_;
53+
std::__libcpp_mutex_t* mtx_;
5454
#endif
5555
};
5656

57-
5857
static const size_t HEAP_SIZE = 512;
59-
char heap [ HEAP_SIZE ] __attribute__((aligned));
58+
char heap[HEAP_SIZE] __attribute__((aligned));
6059

6160
typedef unsigned short heap_offset;
6261
typedef unsigned short heap_size;
6362

6463
struct heap_node {
65-
heap_offset next_node; // offset into heap
66-
heap_size len; // size in units of "sizeof(heap_node)"
64+
heap_offset next_node; // offset into heap
65+
heap_size len; // size in units of "sizeof(heap_node)"
6766
};
6867

69-
static const heap_node *list_end = (heap_node *) ( &heap [ HEAP_SIZE ] ); // one past the end of the heap
70-
static heap_node *freelist = NULL;
68+
static const heap_node* list_end =
69+
(heap_node*)(&heap[HEAP_SIZE]); // one past the end of the heap
70+
static heap_node* freelist = NULL;
7171

72-
heap_node *node_from_offset ( const heap_offset offset )
73-
{ return (heap_node *) ( heap + ( offset * sizeof (heap_node))); }
72+
heap_node* node_from_offset(const heap_offset offset) {
73+
return (heap_node*)(heap + (offset * sizeof(heap_node)));
74+
}
7475

75-
heap_offset offset_from_node ( const heap_node *ptr )
76-
{ return static_cast<heap_offset>(static_cast<size_t>(reinterpret_cast<const char *>(ptr) - heap) / sizeof (heap_node)); }
76+
heap_offset offset_from_node(const heap_node* ptr) {
77+
return static_cast<heap_offset>(
78+
static_cast<size_t>(reinterpret_cast<const char*>(ptr) - heap) /
79+
sizeof(heap_node));
80+
}
7781

78-
void init_heap () {
79-
freelist = (heap_node *) heap;
80-
freelist->next_node = offset_from_node ( list_end );
81-
freelist->len = HEAP_SIZE / sizeof (heap_node);
82-
}
82+
void init_heap() {
83+
freelist = (heap_node*)heap;
84+
freelist->next_node = offset_from_node(list_end);
85+
freelist->len = HEAP_SIZE / sizeof(heap_node);
86+
}
8387

8488
// How big a chunk we allocate
85-
size_t alloc_size (size_t len)
86-
{ return (len + sizeof(heap_node) - 1) / sizeof(heap_node) + 1; }
87-
88-
bool is_fallback_ptr ( void *ptr )
89-
{ return ptr >= heap && ptr < ( heap + HEAP_SIZE ); }
90-
91-
void *fallback_malloc(size_t len) {
92-
heap_node *p, *prev;
93-
const size_t nelems = alloc_size ( len );
94-
mutexor mtx ( &heap_mutex );
95-
96-
if ( NULL == freelist )
97-
init_heap ();
98-
99-
// Walk the free list, looking for a "big enough" chunk
100-
for (p = freelist, prev = 0;
101-
p && p != list_end; prev = p, p = node_from_offset ( p->next_node)) {
102-
103-
if (p->len > nelems) { // chunk is larger, shorten, and return the tail
104-
heap_node *q;
105-
106-
p->len = static_cast<heap_size>(p->len - nelems);
107-
q = p + p->len;
108-
q->next_node = 0;
109-
q->len = static_cast<heap_size>(nelems);
110-
return (void *) (q + 1);
111-
}
112-
113-
if (p->len == nelems) { // exact size match
114-
if (prev == 0)
115-
freelist = node_from_offset(p->next_node);
116-
else
117-
prev->next_node = p->next_node;
118-
p->next_node = 0;
119-
return (void *) (p + 1);
120-
}
89+
size_t alloc_size(size_t len) {
90+
return (len + sizeof(heap_node) - 1) / sizeof(heap_node) + 1;
91+
}
92+
93+
bool is_fallback_ptr(void* ptr) {
94+
return ptr >= heap && ptr < (heap + HEAP_SIZE);
95+
}
96+
97+
void* fallback_malloc(size_t len) {
98+
heap_node *p, *prev;
99+
const size_t nelems = alloc_size(len);
100+
mutexor mtx(&heap_mutex);
101+
102+
if (NULL == freelist)
103+
init_heap();
104+
105+
// Walk the free list, looking for a "big enough" chunk
106+
for (p = freelist, prev = 0; p && p != list_end;
107+
prev = p, p = node_from_offset(p->next_node)) {
108+
109+
if (p->len > nelems) { // chunk is larger, shorten, and return the tail
110+
heap_node* q;
111+
112+
p->len = static_cast<heap_size>(p->len - nelems);
113+
q = p + p->len;
114+
q->next_node = 0;
115+
q->len = static_cast<heap_size>(nelems);
116+
return (void*)(q + 1);
117+
}
118+
119+
if (p->len == nelems) { // exact size match
120+
if (prev == 0)
121+
freelist = node_from_offset(p->next_node);
122+
else
123+
prev->next_node = p->next_node;
124+
p->next_node = 0;
125+
return (void*)(p + 1);
121126
}
122-
return NULL; // couldn't find a spot big enough
127+
}
128+
return NULL; // couldn't find a spot big enough
123129
}
124130

125131
// Return the start of the next block
126-
heap_node *after ( struct heap_node *p ) { return p + p->len; }
132+
heap_node* after(struct heap_node* p) { return p + p->len; }
127133

128-
void fallback_free (void *ptr) {
129-
struct heap_node *cp = ((struct heap_node *) ptr) - 1; // retrieve the chunk
130-
struct heap_node *p, *prev;
134+
void fallback_free(void* ptr) {
135+
struct heap_node* cp = ((struct heap_node*)ptr) - 1; // retrieve the chunk
136+
struct heap_node *p, *prev;
131137

132-
mutexor mtx ( &heap_mutex );
138+
mutexor mtx(&heap_mutex);
133139

134140
#ifdef DEBUG_FALLBACK_MALLOC
135-
std::cout << "Freeing item at " << offset_from_node ( cp ) << " of size " << cp->len << std::endl;
141+
std::cout << "Freeing item at " << offset_from_node(cp) << " of size "
142+
<< cp->len << std::endl;
136143
#endif
137144

138-
for (p = freelist, prev = 0;
139-
p && p != list_end; prev = p, p = node_from_offset (p->next_node)) {
145+
for (p = freelist, prev = 0; p && p != list_end;
146+
prev = p, p = node_from_offset(p->next_node)) {
140147
#ifdef DEBUG_FALLBACK_MALLOC
141-
std::cout << " p, cp, after (p), after(cp) "
142-
<< offset_from_node ( p ) << ' '
143-
<< offset_from_node ( cp ) << ' '
144-
<< offset_from_node ( after ( p )) << ' '
145-
<< offset_from_node ( after ( cp )) << std::endl;
148+
std::cout << " p, cp, after (p), after(cp) " << offset_from_node(p) << ' '
149+
<< offset_from_node(cp) << ' ' << offset_from_node(after(p))
150+
<< ' ' << offset_from_node(after(cp)) << std::endl;
146151
#endif
147-
if ( after ( p ) == cp ) {
152+
if (after(p) == cp) {
148153
#ifdef DEBUG_FALLBACK_MALLOC
149-
std::cout << " Appending onto chunk at " << offset_from_node ( p ) << std::endl;
154+
std::cout << " Appending onto chunk at " << offset_from_node(p)
155+
<< std::endl;
150156
#endif
151-
p->len = static_cast<heap_size>(p->len + cp->len); // make the free heap_node larger
152-
return;
153-
}
154-
else if ( after ( cp ) == p ) { // there's a free heap_node right after
157+
p->len = static_cast<heap_size>(
158+
p->len + cp->len); // make the free heap_node larger
159+
return;
160+
} else if (after(cp) == p) { // there's a free heap_node right after
155161
#ifdef DEBUG_FALLBACK_MALLOC
156-
std::cout << " Appending free chunk at " << offset_from_node ( p ) << std::endl;
162+
std::cout << " Appending free chunk at " << offset_from_node(p)
163+
<< std::endl;
157164
#endif
158-
cp->len = static_cast<heap_size>(cp->len + p->len);
159-
if ( prev == 0 ) {
160-
freelist = cp;
161-
cp->next_node = p->next_node;
162-
}
163-
else
164-
prev->next_node = offset_from_node(cp);
165-
return;
166-
}
167-
}
165+
cp->len = static_cast<heap_size>(cp->len + p->len);
166+
if (prev == 0) {
167+
freelist = cp;
168+
cp->next_node = p->next_node;
169+
} else
170+
prev->next_node = offset_from_node(cp);
171+
return;
172+
}
173+
}
168174
// Nothing to merge with, add it to the start of the free list
169175
#ifdef DEBUG_FALLBACK_MALLOC
170-
std::cout << " Making new free list entry " << offset_from_node ( cp ) << std::endl;
176+
std::cout << " Making new free list entry " << offset_from_node(cp)
177+
<< std::endl;
171178
#endif
172-
cp->next_node = offset_from_node ( freelist );
173-
freelist = cp;
179+
cp->next_node = offset_from_node(freelist);
180+
freelist = cp;
174181
}
175182

176183
#ifdef INSTRUMENT_FALLBACK_MALLOC
177-
size_t print_free_list () {
178-
struct heap_node *p, *prev;
179-
heap_size total_free = 0;
180-
if ( NULL == freelist )
181-
init_heap ();
182-
183-
for (p = freelist, prev = 0;
184-
p && p != list_end; prev = p, p = node_from_offset (p->next_node)) {
185-
std::cout << ( prev == 0 ? "" : " ") << "Offset: " << offset_from_node ( p )
186-
<< "\tsize: " << p->len << " Next: " << p->next_node << std::endl;
187-
total_free += p->len;
188-
}
189-
std::cout << "Total Free space: " << total_free << std::endl;
190-
return total_free;
191-
}
184+
size_t print_free_list() {
185+
struct heap_node *p, *prev;
186+
heap_size total_free = 0;
187+
if (NULL == freelist)
188+
init_heap();
189+
190+
for (p = freelist, prev = 0; p && p != list_end;
191+
prev = p, p = node_from_offset(p->next_node)) {
192+
std::cout << (prev == 0 ? "" : " ") << "Offset: " << offset_from_node(p)
193+
<< "\tsize: " << p->len << " Next: " << p->next_node << std::endl;
194+
total_free += p->len;
195+
}
196+
std::cout << "Total Free space: " << total_free << std::endl;
197+
return total_free;
198+
}
192199
#endif
193-
} // end unnamed namespace
200+
} // end unnamed namespace
194201

195202
namespace __cxxabiv1 {
196203

197-
struct __attribute__((aligned)) __aligned_type {};
204+
struct __attribute__((aligned)) __aligned_type {};
198205

199-
void * __aligned_malloc_with_fallback(size_t size) {
206+
void* __aligned_malloc_with_fallback(size_t size) {
200207
#if defined(_WIN32)
201-
if (void *dest = _aligned_malloc(size, alignof(__aligned_type)))
202-
return dest;
208+
if (void* dest = _aligned_malloc(size, alignof(__aligned_type)))
209+
return dest;
203210
#elif defined(_LIBCPP_HAS_NO_ALIGNED_ALLOCATION)
204-
if (void* dest = std::malloc(size))
205-
return dest;
211+
if (void* dest = std::malloc(size))
212+
return dest;
206213
#else
207-
if (size == 0)
208-
size = 1;
209-
void* dest;
210-
if (::posix_memalign(&dest, alignof(__aligned_type), size) == 0)
211-
return dest;
214+
if (size == 0)
215+
size = 1;
216+
void* dest;
217+
if (::posix_memalign(&dest, alignof(__aligned_type), size) == 0)
218+
return dest;
212219
#endif
213-
return fallback_malloc(size);
220+
return fallback_malloc(size);
214221
}
215222

216-
217-
void * __calloc_with_fallback(size_t count, size_t size) {
218-
void *ptr = std::calloc(count, size);
219-
if (NULL != ptr)
220-
return ptr;
221-
// if calloc fails, fall back to emergency stash
222-
ptr = fallback_malloc(size * count);
223-
if (NULL != ptr)
224-
std::memset(ptr, 0, size * count);
223+
void* __calloc_with_fallback(size_t count, size_t size) {
224+
void* ptr = std::calloc(count, size);
225+
if (NULL != ptr)
225226
return ptr;
227+
// if calloc fails, fall back to emergency stash
228+
ptr = fallback_malloc(size * count);
229+
if (NULL != ptr)
230+
std::memset(ptr, 0, size * count);
231+
return ptr;
226232
}
227233

228234
void __aligned_free_with_fallback(void* ptr) {
229235
if (is_fallback_ptr(ptr))
230-
fallback_free(ptr);
236+
fallback_free(ptr);
231237
else {
232238
#if defined(_WIN32)
233-
::_aligned_free(ptr);
239+
::_aligned_free(ptr);
234240
#else
235-
std::free(ptr);
241+
std::free(ptr);
236242
#endif
237243
}
238244
}
239245

240-
void __free_with_fallback(void *ptr) {
241-
if (is_fallback_ptr(ptr))
242-
fallback_free(ptr);
243-
else
244-
std::free(ptr);
246+
void __free_with_fallback(void* ptr) {
247+
if (is_fallback_ptr(ptr))
248+
fallback_free(ptr);
249+
else
250+
std::free(ptr);
245251
}
246252

247253
} // namespace __cxxabiv1

0 commit comments

Comments
 (0)
Please sign in to comment.