@@ -32,216 +32,222 @@ namespace {
_LIBCPP_SAFE_STATIC
static std::__libcpp_mutex_t heap_mutex = _LIBCPP_MUTEX_INITIALIZER;
#else
- static void *heap_mutex = 0;
+ static void* heap_mutex = 0;
#endif

class mutexor {
public:
#ifndef _LIBCXXABI_HAS_NO_THREADS
-     mutexor ( std::__libcpp_mutex_t *m ) : mtx_(m) {
-         std::__libcpp_mutex_lock ( mtx_ );
-     }
-     ~mutexor () { std::__libcpp_mutex_unlock ( mtx_ ); }
+   mutexor(std::__libcpp_mutex_t* m) : mtx_(m) {
+     std::__libcpp_mutex_lock(mtx_);
+   }
+   ~mutexor() { std::__libcpp_mutex_unlock(mtx_); }
#else
-     mutexor ( void * ) {}
-     ~mutexor () {}
+   mutexor(void*) {}
+   ~mutexor() {}
#endif
private:
-     mutexor ( const mutexor &rhs );
-     mutexor & operator = ( const mutexor &rhs );
+   mutexor(const mutexor& rhs);
+   mutexor& operator=(const mutexor& rhs);
#ifndef _LIBCXXABI_HAS_NO_THREADS
-     std::__libcpp_mutex_t *mtx_;
+   std::__libcpp_mutex_t* mtx_;
#endif
};

-
static const size_t HEAP_SIZE = 512;
- char heap [ HEAP_SIZE ] __attribute__((aligned));
+ char heap[HEAP_SIZE] __attribute__((aligned));

typedef unsigned short heap_offset;
typedef unsigned short heap_size;

struct heap_node {
-     heap_offset next_node;  // offset into heap
-     heap_size   len;        // size in units of "sizeof(heap_node)"
+   heap_offset next_node; // offset into heap
+   heap_size len;         // size in units of "sizeof(heap_node)"
};

- static const heap_node *list_end = (heap_node *) ( &heap [ HEAP_SIZE ] );   // one past the end of the heap
- static heap_node *freelist = NULL;
+ static const heap_node* list_end =
+     (heap_node*)(&heap[HEAP_SIZE]); // one past the end of the heap
+ static heap_node* freelist = NULL;

- heap_node *node_from_offset ( const heap_offset offset )
-     { return (heap_node *) ( heap + ( offset * sizeof (heap_node))); }
+ heap_node* node_from_offset(const heap_offset offset) {
+   return (heap_node*)(heap + (offset * sizeof(heap_node)));
+ }

- heap_offset offset_from_node ( const heap_node *ptr )
-     { return static_cast<heap_offset>(static_cast<size_t>(reinterpret_cast<const char *>(ptr) - heap) / sizeof (heap_node)); }
+ heap_offset offset_from_node(const heap_node* ptr) {
+   return static_cast<heap_offset>(
+       static_cast<size_t>(reinterpret_cast<const char*>(ptr) - heap) /
+       sizeof(heap_node));
+ }

- void init_heap () {
-     freelist = (heap_node *) heap;
-     freelist->next_node = offset_from_node ( list_end );
-     freelist->len = HEAP_SIZE / sizeof (heap_node);
-     }
+ void init_heap() {
+   freelist = (heap_node*)heap;
+   freelist->next_node = offset_from_node(list_end);
+   freelist->len = HEAP_SIZE / sizeof(heap_node);
+ }

// How big a chunk we allocate
- size_t alloc_size (size_t len)
-     { return (len + sizeof (heap_node) - 1) / sizeof (heap_node) + 1; }
-
- bool is_fallback_ptr ( void *ptr )
-     { return ptr >= heap && ptr < ( heap + HEAP_SIZE ); }
-
- void *fallback_malloc (size_t len) {
-     heap_node *p, *prev;
-     const size_t nelems = alloc_size ( len );
-     mutexor mtx ( &heap_mutex );
-
-     if ( NULL == freelist )
-         init_heap ();
-
- // Walk the free list, looking for a "big enough" chunk
-     for (p = freelist, prev = 0;
-             p && p != list_end; prev = p, p = node_from_offset ( p->next_node )) {
-
-         if (p->len > nelems) {  // chunk is larger, shorten, and return the tail
-             heap_node *q;
-
-             p->len = static_cast<heap_size>(p->len - nelems);
-             q = p + p->len;
-             q->next_node = 0;
-             q->len = static_cast<heap_size>(nelems);
-             return (void *) (q + 1);
-         }
-
-         if (p->len == nelems) { // exact size match
-             if (prev == 0)
-                 freelist = node_from_offset (p->next_node);
-             else
-                 prev->next_node = p->next_node;
-             p->next_node = 0;
-             return (void *) (p + 1);
-         }
+ size_t alloc_size(size_t len) {
+   return (len + sizeof(heap_node) - 1) / sizeof(heap_node) + 1;
+ }
+
+ bool is_fallback_ptr(void* ptr) {
+   return ptr >= heap && ptr < (heap + HEAP_SIZE);
+ }
+
+ void* fallback_malloc(size_t len) {
+   heap_node *p, *prev;
+   const size_t nelems = alloc_size(len);
+   mutexor mtx(&heap_mutex);
+
+   if (NULL == freelist)
+     init_heap();
+
+   // Walk the free list, looking for a "big enough" chunk
+   for (p = freelist, prev = 0; p && p != list_end;
+        prev = p, p = node_from_offset(p->next_node)) {
+
+     if (p->len > nelems) { // chunk is larger, shorten, and return the tail
+       heap_node* q;
+
+       p->len = static_cast<heap_size>(p->len - nelems);
+       q = p + p->len;
+       q->next_node = 0;
+       q->len = static_cast<heap_size>(nelems);
+       return (void*)(q + 1);
+     }
+
+     if (p->len == nelems) { // exact size match
+       if (prev == 0)
+         freelist = node_from_offset(p->next_node);
+       else
+         prev->next_node = p->next_node;
+       p->next_node = 0;
+       return (void*)(p + 1);
    }
-     return NULL;  // couldn't find a spot big enough
+   }
+   return NULL; // couldn't find a spot big enough
}

// Return the start of the next block
- heap_node *after ( struct heap_node *p ) { return p + p->len; }
+ heap_node* after(struct heap_node* p) { return p + p->len; }

- void fallback_free (void *ptr) {
-     struct heap_node *cp = ((struct heap_node *) ptr) - 1;  // retrieve the chunk
-     struct heap_node *p, *prev;
+ void fallback_free(void* ptr) {
+   struct heap_node* cp = ((struct heap_node*)ptr) - 1; // retrieve the chunk
+   struct heap_node *p, *prev;

-     mutexor mtx ( &heap_mutex );
+   mutexor mtx(&heap_mutex);

#ifdef DEBUG_FALLBACK_MALLOC
-     std::cout << "Freeing item at " << offset_from_node ( cp ) << " of size " << cp->len << std::endl;
+   std::cout << "Freeing item at " << offset_from_node(cp) << " of size "
+             << cp->len << std::endl;
#endif

-     for (p = freelist, prev = 0;
-             p && p != list_end; prev = p, p = node_from_offset (p->next_node )) {
+   for (p = freelist, prev = 0; p && p != list_end;
+        prev = p, p = node_from_offset(p->next_node)) {
#ifdef DEBUG_FALLBACK_MALLOC
-         std::cout << "  p, cp, after (p), after(cp) "
-             << offset_from_node ( p ) << ' '
-             << offset_from_node ( cp ) << ' '
-             << offset_from_node ( after ( p )) << ' '
-             << offset_from_node ( after ( cp )) << std::endl;
+     std::cout << "  p, cp, after (p), after(cp) " << offset_from_node(p) << ' '
+               << offset_from_node(cp) << ' ' << offset_from_node(after(p))
+               << ' ' << offset_from_node(after(cp)) << std::endl;
#endif
-         if ( after ( p ) == cp ) {
+     if (after(p) == cp) {
#ifdef DEBUG_FALLBACK_MALLOC
-             std::cout << "  Appending onto chunk at " << offset_from_node ( p ) << std::endl;
+       std::cout << "  Appending onto chunk at " << offset_from_node(p)
+                 << std::endl;
#endif
-             p->len = static_cast<heap_size>(p->len + cp->len);  // make the free heap_node larger
-             return;
-             }
-         else if ( after ( cp ) == p ) { // there's a free heap_node right after
+       p->len = static_cast<heap_size>(
+           p->len + cp->len); // make the free heap_node larger
+       return;
+     } else if (after(cp) == p) { // there's a free heap_node right after
#ifdef DEBUG_FALLBACK_MALLOC
-             std::cout << "  Appending free chunk at " << offset_from_node ( p ) << std::endl;
+       std::cout << "  Appending free chunk at " << offset_from_node(p)
+                 << std::endl;
#endif
-             cp->len = static_cast<heap_size>(cp->len + p->len);
-             if ( prev == 0 ) {
-                 freelist = cp;
-                 cp->next_node = p->next_node;
-                 }
-             else
-                 prev->next_node = offset_from_node (cp);
-             return;
-             }
-         }
+       cp->len = static_cast<heap_size>(cp->len + p->len);
+       if (prev == 0) {
+         freelist = cp;
+         cp->next_node = p->next_node;
+       } else
+         prev->next_node = offset_from_node(cp);
+       return;
+     }
+   }
// Nothing to merge with, add it to the start of the free list
#ifdef DEBUG_FALLBACK_MALLOC
-     std::cout << "  Making new free list entry " << offset_from_node ( cp ) << std::endl;
+   std::cout << "  Making new free list entry " << offset_from_node(cp)
+             << std::endl;
#endif
-     cp->next_node = offset_from_node ( freelist );
-     freelist = cp;
+   cp->next_node = offset_from_node(freelist);
+   freelist = cp;
}

#ifdef INSTRUMENT_FALLBACK_MALLOC
- size_t print_free_list () {
-     struct heap_node *p, *prev;
-     heap_size total_free = 0;
-     if ( NULL == freelist )
-         init_heap ();
-
-     for (p = freelist, prev = 0;
-             p && p != list_end; prev = p, p = node_from_offset (p->next_node )) {
-         std::cout << ( prev == 0 ? "" : "  ") << "Offset: " << offset_from_node ( p )
-                 << "\tsize: " << p->len << " Next: " << p->next_node << std::endl;
-         total_free += p->len;
-         }
-     std::cout << "Total Free space: " << total_free << std::endl;
-     return total_free;
-     }
+ size_t print_free_list() {
+   struct heap_node *p, *prev;
+   heap_size total_free = 0;
+   if (NULL == freelist)
+     init_heap();
+
+   for (p = freelist, prev = 0; p && p != list_end;
+        prev = p, p = node_from_offset(p->next_node)) {
+     std::cout << (prev == 0 ? "" : "  ") << "Offset: " << offset_from_node(p)
+               << "\tsize: " << p->len << " Next: " << p->next_node << std::endl;
+     total_free += p->len;
+   }
+   std::cout << "Total Free space: " << total_free << std::endl;
+   return total_free;
+ }
#endif
- }  // end unnamed namespace
+ } // end unnamed namespace

namespace __cxxabiv1 {

- struct __attribute__ ((aligned)) __aligned_type {};
+ struct __attribute__((aligned)) __aligned_type {};

- void *__aligned_malloc_with_fallback (size_t size) {
+ void* __aligned_malloc_with_fallback(size_t size) {
#if defined(_WIN32)
-     if (void *dest = _aligned_malloc (size, alignof (__aligned_type)))
-         return dest;
+   if (void* dest = _aligned_malloc(size, alignof(__aligned_type)))
+     return dest;
#elif defined(_LIBCPP_HAS_NO_ALIGNED_ALLOCATION)
-     if (void *dest = std::malloc (size))
-         return dest;
+   if (void* dest = std::malloc(size))
+     return dest;
#else
-     if (size == 0)
-         size = 1;
-     void *dest;
-     if (::posix_memalign (&dest, alignof (__aligned_type), size) == 0)
-         return dest;
+   if (size == 0)
+     size = 1;
+   void* dest;
+   if (::posix_memalign(&dest, alignof(__aligned_type), size) == 0)
+     return dest;
#endif
-     return fallback_malloc (size);
+   return fallback_malloc(size);
}

-
- void *__calloc_with_fallback (size_t count, size_t size) {
-     void *ptr = std::calloc (count, size);
-     if (NULL != ptr)
-         return ptr;
-     // if calloc fails, fall back to emergency stash
-     ptr = fallback_malloc (size * count);
-     if (NULL != ptr)
-         std::memset (ptr, 0, size * count);
+ void* __calloc_with_fallback(size_t count, size_t size) {
+   void* ptr = std::calloc(count, size);
+   if (NULL != ptr)
    return ptr;
+   // if calloc fails, fall back to emergency stash
+   ptr = fallback_malloc(size * count);
+   if (NULL != ptr)
+     std::memset(ptr, 0, size * count);
+   return ptr;
}

void __aligned_free_with_fallback(void* ptr) {
  if (is_fallback_ptr(ptr))
-     fallback_free (ptr);
+     fallback_free(ptr);
  else {
#if defined(_WIN32)
-     ::_aligned_free (ptr);
+     ::_aligned_free(ptr);
#else
-     std::free (ptr);
+     std::free(ptr);
#endif
  }
}

- void __free_with_fallback (void *ptr) {
-     if (is_fallback_ptr (ptr))
-         fallback_free (ptr);
-     else
-         std::free (ptr);
+ void __free_with_fallback(void* ptr) {
+   if (is_fallback_ptr(ptr))
+     fallback_free(ptr);
+   else
+     std::free(ptr);
}

} // namespace __cxxabiv1
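
For readers skimming the diff, here is a minimal standalone sketch of the bookkeeping the reformatted code keeps: a fixed `char` arena carved into `heap_node`-sized units, a free list threaded through 16-bit offsets, and `alloc_size` reserving one extra node as a header in front of each allocation. This is not the libc++abi sources verbatim; the `demo` namespace and the `main` driver are illustrative only.

```cpp
// Illustrative sketch of the fallback heap's offset arithmetic (assumed
// simplification of the code in this diff, not the library itself).
#include <cstddef>
#include <cstdio>

namespace demo {

typedef unsigned short heap_offset; // offsets measured in heap_node units
typedef unsigned short heap_size;   // sizes measured in heap_node units

struct heap_node {
  heap_offset next_node;
  heap_size len;
};

const std::size_t HEAP_SIZE = 512;
char heap[HEAP_SIZE] __attribute__((aligned));

heap_node* node_from_offset(heap_offset offset) {
  return (heap_node*)(heap + offset * sizeof(heap_node));
}

heap_offset offset_from_node(const heap_node* ptr) {
  return static_cast<heap_offset>(
      static_cast<std::size_t>((const char*)ptr - heap) / sizeof(heap_node));
}

// Units needed for a request of `len` bytes: round up to whole
// heap_node units, then add one more unit for the header node.
std::size_t alloc_size(std::size_t len) {
  return (len + sizeof(heap_node) - 1) / sizeof(heap_node) + 1;
}

} // namespace demo

int main() {
  using namespace demo;
  // A 20-byte request with a 4-byte heap_node needs 5 payload units + 1 header.
  std::printf("alloc_size(20) = %zu units\n", alloc_size(20));

  // Round-trip: the node at offset 3 maps back to offset 3.
  heap_node* n = node_from_offset(3);
  std::printf("offset_from_node(node_from_offset(3)) = %u\n",
              (unsigned)offset_from_node(n));
  return 0;
}
```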