/* Copyright (c) 2008-2019, The Tor Project, Inc. */
/* See LICENSE for licensing information */

/**
 * \file memarea.c
 *
 * \brief Implementation for memarea_t, an allocator for lots of small
 * objects that are all freed at once.
 */

#include "orconfig.h"
#include "lib/memarea/memarea.h"

#include <stdlib.h>
#include <string.h>

#include "lib/arch/bytes.h"
#include "lib/cc/torint.h"
#include "lib/smartlist_core/smartlist_core.h"
#include "lib/smartlist_core/smartlist_foreach.h"
#include "lib/log/log.h"
#include "lib/log/util_bug.h"
#include "lib/malloc/malloc.h"

#ifndef DISABLE_MEMORY_SENTINELS

/** If true, we try to detect any attempts to write beyond the length of a
 * memarea. */
#define USE_SENTINELS

/** All returned pointers should be aligned to the nearest multiple of this
 * value. */
#define MEMAREA_ALIGN SIZEOF_VOID_P

/** A value which, when masked out of a pointer, produces a maximally
 * aligned pointer. */
#if MEMAREA_ALIGN == 4
#define MEMAREA_ALIGN_MASK ((uintptr_t)3)
#elif MEMAREA_ALIGN == 8
#define MEMAREA_ALIGN_MASK ((uintptr_t)7)
#else
#error "void* is neither 4 nor 8 bytes long. I don't know how to align stuff."
#endif /* MEMAREA_ALIGN == 4 || ... */

#if defined(__GNUC__) && defined(FLEXIBLE_ARRAY_MEMBER)
#define USE_ALIGNED_ATTRIBUTE
/** Name for the memory-holding member of a memarea chunk. */
#define U_MEM mem
#else
#define U_MEM u.mem
#endif /* defined(__GNUC__) && defined(FLEXIBLE_ARRAY_MEMBER) */

#ifdef USE_SENTINELS
/** Magic value that we stick at the end of a memarea's memory so we can
 * detect run-off-the-end bugs. */
#define SENTINEL_VAL 0x90806622u
/** How many bytes per chunk do we devote to the sentinel? */
#define SENTINEL_LEN sizeof(uint32_t)
/** Given a memarea_chunk_t with SENTINEL_LEN extra bytes allocated at the
 * end, write the sentinel into those bytes. */
#define SET_SENTINEL(chunk)                                     \
  STMT_BEGIN                                                    \
  set_uint32( &(chunk)->U_MEM[chunk->mem_size], SENTINEL_VAL ); \
  STMT_END
/** Assert that the sentinel on a chunk is still intact. */
#define CHECK_SENTINEL(chunk)                                           \
  STMT_BEGIN                                                            \
  uint32_t sent_val = get_uint32(&(chunk)->U_MEM[chunk->mem_size]);     \
  tor_assert(sent_val == SENTINEL_VAL);                                 \
  STMT_END
#else /* !(defined(USE_SENTINELS)) */
#define SENTINEL_LEN 0
#define SET_SENTINEL(chunk) STMT_NIL
#define CHECK_SENTINEL(chunk) STMT_NIL
#endif /* defined(USE_SENTINELS) */

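/*
 * Sketch of how the sentinel catches an overrun (illustrative only; it
 * assumes this sentinel-enabled build and uses only functions defined later
 * in this file).  The sentinel word sits just past the last usable byte of a
 * chunk, so only writes that run past chunk->U_MEM + chunk->mem_size are
 * detected; the check fires at the next CHECK_SENTINEL(), which happens in
 * memarea_alloc(), memarea_get_stats(), memarea_assert_ok(), and when a
 * chunk is freed.
 *
 *   memarea_t *area = memarea_new();
 *   size_t allocated = 0, used = 0;
 *   memarea_get_stats(area, &allocated, &used);
 *   char *buf = memarea_alloc(area, allocated - used); // fill the chunk
 *   buf[allocated - used] = 'X';  // one byte too far: lands on the sentinel
 *   memarea_assert_ok(area);      // tor_assert() fails here
 */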
/** Increment <b>ptr</b> until it is aligned to MEMAREA_ALIGN. */
static inline void *
realign_pointer(void *ptr)
{
  uintptr_t x = (uintptr_t)ptr;
  x = (x+MEMAREA_ALIGN_MASK) & ~MEMAREA_ALIGN_MASK;
  /* Reinstate this if bug 930 ever reappears
  tor_assert(((void*)x) >= ptr);
  */
  return (void*)x;
}

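/*
 * Worked example of the round-up above, assuming MEMAREA_ALIGN == 8 (so
 * MEMAREA_ALIGN_MASK == 7):
 *
 *   x = 0x1003;  x + 7 == 0x100a;  0x100a & ~7 == 0x1008  (next multiple of 8)
 *
 * An already-aligned pointer is left unchanged: 0x1008 + 7 == 0x100f, and
 * 0x100f & ~7 == 0x1008.
 */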
/** Implements part of a memarea.  New memory is carved off from chunk->mem
 * in increasing order until a request doesn't fit; then a new chunk is
 * allocated. */
typedef struct memarea_chunk_t {
  /** Next chunk in this area.  Only kept around so we can free it. */
  struct memarea_chunk_t *next_chunk;
  size_t mem_size; /**< How much RAM is available in mem, total? */
  char *next_mem;  /**< Next position in mem to allocate data at.  If it's
                    * equal to mem+mem_size, this chunk is full. */
#ifdef USE_ALIGNED_ATTRIBUTE
  /** Actual content of the memory chunk. */
  char mem[FLEXIBLE_ARRAY_MEMBER] __attribute__((aligned(MEMAREA_ALIGN)));
#else
  union {
    char mem[1]; /**< Memory space in this chunk. */
    void *void_for_alignment_; /**< Dummy member used to ensure alignment. */
  } u; /**< Union used to enforce alignment when we can't use the aligned
        * attribute. */
#endif /* defined(USE_ALIGNED_ATTRIBUTE) */
} memarea_chunk_t;

/** How many bytes are needed for overhead before we get to the memory part
 * of a chunk? */
#define CHUNK_HEADER_SIZE offsetof(memarea_chunk_t, U_MEM)

/** What's the smallest size at which we'll allocate a chunk? */
#define CHUNK_SIZE 4096

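/*
 * Layout of one chunk as returned by tor_malloc() in alloc_chunk() below
 * (an informal picture; the exact header size is CHUNK_HEADER_SIZE):
 *
 *   [ header (CHUNK_HEADER_SIZE) | mem[] (mem_size bytes) | sentinel (SENTINEL_LEN) ]
 *
 * next_mem always points into mem[]: everything before it has been handed
 * out, and everything from next_mem up to mem + mem_size is still free.
 */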
/** A memarea_t is an allocation region for a set of small memory requests
 * that will all be freed at once. */
struct memarea_t {
  memarea_chunk_t *first; /**< Top of the chunk stack: never NULL. */
};

/** Helper: allocate a new memarea chunk of around <b>sz</b> bytes. */
static memarea_chunk_t *
alloc_chunk(size_t sz)
{
  tor_assert(sz < SIZE_T_CEILING);

  size_t chunk_size = sz < CHUNK_SIZE ? CHUNK_SIZE : sz;
  memarea_chunk_t *res;
  chunk_size += SENTINEL_LEN;
  res = tor_malloc(chunk_size);
  res->next_chunk = NULL;
  res->mem_size = chunk_size - CHUNK_HEADER_SIZE - SENTINEL_LEN;
  res->next_mem = res->U_MEM;
  tor_assert(res->next_mem+res->mem_size+SENTINEL_LEN ==
             ((char*)res)+chunk_size);
  tor_assert(realign_pointer(res->next_mem) == res->next_mem);
  SET_SENTINEL(res);
  return res;
}

/** Release <b>chunk</b> from a memarea, checking its sentinel first. */
static void
memarea_chunk_free_unchecked(memarea_chunk_t *chunk)
{
  CHECK_SENTINEL(chunk);
  tor_free(chunk);
}

/** Allocate and return a new memarea. */
memarea_t *
memarea_new(void)
{
  memarea_t *head = tor_malloc(sizeof(memarea_t));
  head->first = alloc_chunk(CHUNK_SIZE);
  return head;
}

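/*
 * Typical usage, as a sketch (callers outside this file get these
 * declarations from lib/memarea/memarea.h):
 *
 *   memarea_t *area = memarea_new();
 *   char *copy = memarea_strdup(area, "example");
 *   void *buf  = memarea_alloc_zero(area, 128);
 *   ...                          // use copy and buf freely
 *   memarea_drop_all_(area);     // frees copy, buf, and area all at once
 *
 * Individual allocations are never freed one by one; everything carved from
 * the area is released together by memarea_drop_all_() or recycled by
 * memarea_clear().
 */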
/** Free <b>area</b>, invalidating all pointers returned from memarea_alloc()
 * and friends for this area. */
void
memarea_drop_all_(memarea_t *area)
{
  memarea_chunk_t *chunk, *next;
  for (chunk = area->first; chunk; chunk = next) {
    next = chunk->next_chunk;
    memarea_chunk_free_unchecked(chunk);
  }
  area->first = NULL; /* fail fast on any use-after-free */
  tor_free(area);
}

/** Forget about having allocated anything in <b>area</b>, and free some of
 * the backing storage associated with it, as appropriate.  Invalidates all
 * pointers returned from memarea_alloc() for this area. */
void
memarea_clear(memarea_t *area)
{
  memarea_chunk_t *chunk, *next;
  if (area->first->next_chunk) {
    for (chunk = area->first->next_chunk; chunk; chunk = next) {
      next = chunk->next_chunk;
      memarea_chunk_free_unchecked(chunk);
    }
    area->first->next_chunk = NULL;
  }
  area->first->next_mem = area->first->U_MEM;
}

/** Return true iff <b>p</b> is in a range that has been returned by an
 * allocation from <b>area</b>. */
int
memarea_owns_ptr(const memarea_t *area, const void *p)
{
  memarea_chunk_t *chunk;
  const char *ptr = p;
  for (chunk = area->first; chunk; chunk = chunk->next_chunk) {
    if (ptr >= chunk->U_MEM && ptr < chunk->next_mem)
      return 1;
  }
  return 0;
}

/** Return a pointer to a chunk of memory in <b>area</b> of at least <b>sz</b>
 * bytes.  <b>sz</b> should be significantly smaller than the area's chunk
 * size, though we can deal if it isn't. */
void *
memarea_alloc(memarea_t *area, size_t sz)
{
  memarea_chunk_t *chunk = area->first;
  char *result;
  tor_assert(chunk);
  CHECK_SENTINEL(chunk);
  tor_assert(sz < SIZE_T_CEILING);
  if (sz == 0)
    sz = 1;
  tor_assert(chunk->next_mem <= chunk->U_MEM + chunk->mem_size);
  const size_t space_remaining =
    (chunk->U_MEM + chunk->mem_size) - chunk->next_mem;
  if (sz > space_remaining) {
    if (sz+CHUNK_HEADER_SIZE >= CHUNK_SIZE) {
      /* This allocation is too big.  Stick it in a special chunk, and put
       * that chunk second in the list. */
      memarea_chunk_t *new_chunk = alloc_chunk(sz);
      new_chunk->next_chunk = chunk->next_chunk;
      chunk->next_chunk = new_chunk;
      chunk = new_chunk;
    } else {
      memarea_chunk_t *new_chunk = alloc_chunk(CHUNK_SIZE);
      new_chunk->next_chunk = chunk;
      area->first = chunk = new_chunk;
    }
    tor_assert(chunk->mem_size >= sz);
  }
  result = chunk->next_mem;
  chunk->next_mem = chunk->next_mem + sz;
  /* Reinstate these if bug 930 ever comes back
  tor_assert(chunk->next_mem >= chunk->U_MEM);
  tor_assert(chunk->next_mem <= chunk->U_MEM+chunk->mem_size);
  */
  chunk->next_mem = realign_pointer(chunk->next_mem);
  return result;
}

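/*
 * Note on the two growth paths above, with a small example (sizes assume
 * the default CHUNK_SIZE of 4096).  A request too large for a fresh chunk
 * gets its own dedicated chunk, spliced in *behind* the current head so the
 * partially filled head keeps serving small requests; a small request that
 * merely overflows the head gets a brand-new head chunk, and whatever space
 * was left in the old head is simply abandoned.
 *
 *   memarea_alloc(area, 16);      // carved from the head chunk
 *   memarea_alloc(area, 100000);  // dedicated chunk, linked second
 *   memarea_alloc(area, 16);      // still carved from the same head chunk
 */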
/** As memarea_alloc(), but clears the memory it returns. */
void *
memarea_alloc_zero(memarea_t *area, size_t sz)
{
  void *result = memarea_alloc(area, sz);
  memset(result, 0, sz);
  return result;
}

/** As memdup, but returns the memory from <b>area</b>. */
void *
memarea_memdup(memarea_t *area, const void *s, size_t n)
{
  char *result = memarea_alloc(area, n);
  memcpy(result, s, n);
  return result;
}

/** As strdup, but returns the memory from <b>area</b>. */
char *
memarea_strdup(memarea_t *area, const char *s)
{
  return memarea_memdup(area, s, strlen(s)+1);
}

/** As strndup, but returns the memory from <b>area</b>. */
char *
memarea_strndup(memarea_t *area, const char *s, size_t n)
{
  size_t ln = 0;
  char *result;
  /* Compute the bounded length by hand rather than relying on strnlen(). */
  for (ln = 0; ln < n && s[ln]; ++ln)
    ;
  result = memarea_alloc(area, ln+1);
  memcpy(result, s, ln);
  result[ln]='\0';
  return result;
}

/** Set <b>allocated_out</b> to the number of bytes allocated in <b>area</b>,
 * and <b>used_out</b> to the number of bytes currently used. */
void
memarea_get_stats(memarea_t *area, size_t *allocated_out, size_t *used_out)
{
  size_t a = 0, u = 0;
  memarea_chunk_t *chunk;
  for (chunk = area->first; chunk; chunk = chunk->next_chunk) {
    CHECK_SENTINEL(chunk);
    a += CHUNK_HEADER_SIZE + chunk->mem_size;
    tor_assert(chunk->next_mem >= chunk->U_MEM);
    u += CHUNK_HEADER_SIZE + (chunk->next_mem - chunk->U_MEM);
  }
  *allocated_out = a;
  *used_out = u;
}

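/*
 * Example of reading the counters (illustrative).  Both values include the
 * per-chunk header overhead, so "used" is nonzero even for a fresh area:
 *
 *   size_t allocated = 0, used = 0;
 *   memarea_get_stats(area, &allocated, &used);
 *   log_debug(LD_MM, "memarea usage: %u of %u bytes",
 *             (unsigned)used, (unsigned)allocated);
 */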
/** Assert that <b>area</b> is okay. */
void
memarea_assert_ok(memarea_t *area)
{
  memarea_chunk_t *chunk;
  tor_assert(area->first);

  for (chunk = area->first; chunk; chunk = chunk->next_chunk) {
    CHECK_SENTINEL(chunk);
    tor_assert(chunk->next_mem >= chunk->U_MEM);
    tor_assert(chunk->next_mem <=
               (char*) realign_pointer(chunk->U_MEM+chunk->mem_size));
  }
}

#else /* !(!defined(DISABLE_MEMORY_SENTINELS)) */

/* Fallback implementation, used when memory sentinels are disabled: every
 * request becomes a separate tor_malloc() tracked in a smartlist, so each
 * allocation is visible individually to tools like AddressSanitizer. */

struct memarea_t {
  smartlist_t *pieces;
};

memarea_t *
memarea_new(void)
{
  memarea_t *ma = tor_malloc_zero(sizeof(memarea_t));
  ma->pieces = smartlist_new();
  return ma;
}
void
memarea_drop_all_(memarea_t *area)
{
  memarea_clear(area);
  smartlist_free(area->pieces);
  tor_free(area);
}
void
memarea_clear(memarea_t *area)
{
  SMARTLIST_FOREACH(area->pieces, void *, p, tor_free_(p));
  smartlist_clear(area->pieces);
}
int
memarea_owns_ptr(const memarea_t *area, const void *ptr)
{
  SMARTLIST_FOREACH(area->pieces, const void *, p, if (ptr == p) return 1;);
  return 0;
}

void *
memarea_alloc(memarea_t *area, size_t sz)
{
  void *result = tor_malloc(sz);
  smartlist_add(area->pieces, result);
  return result;
}

void *
memarea_alloc_zero(memarea_t *area, size_t sz)
{
  void *result = tor_malloc_zero(sz);
  smartlist_add(area->pieces, result);
  return result;
}
void *
memarea_memdup(memarea_t *area, const void *s, size_t n)
{
  void *r = memarea_alloc(area, n);
  memcpy(r, s, n);
  return r;
}
char *
memarea_strdup(memarea_t *area, const char *s)
{
  size_t n = strlen(s);
  char *r = memarea_alloc(area, n+1);
  memcpy(r, s, n);
  r[n] = 0;
  return r;
}
char *
memarea_strndup(memarea_t *area, const char *s, size_t n)
{
  size_t ln = strnlen(s, n);
  char *r = memarea_alloc(area, ln+1);
  memcpy(r, s, ln);
  r[ln] = 0;
  return r;
}
void
memarea_get_stats(memarea_t *area,
                  size_t *allocated_out, size_t *used_out)
{
  /* Real accounting isn't tracked in this fallback; report a fixed value. */
  (void)area;
  *allocated_out = *used_out = 128;
}
void
memarea_assert_ok(memarea_t *area)
{
  (void)area;
}

#endif /* !defined(DISABLE_MEMORY_SENTINELS) */