/*
 * Copyright (c) 2001-2004 Swedish Institute of Computer Science.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
 * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
 * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
 * OF SUCH DAMAGE.
 *
 * This file is part of the lwIP TCP/IP stack.
 *
 * Author: Adam Dunkels <[email protected]>
 *
 */
#ifndef __LWIP_MEM_H__
#define __LWIP_MEM_H__

#include "lwip/opt.h"

#ifdef __cplusplus
extern "C" {
#endif

#if MEM_LIBC_MALLOC

#include <stddef.h> /* for size_t */

typedef size_t mem_size_t;
#define MEM_SIZE_F SZT_F

/* aliases for C library malloc() */
#define mem_init()
/* in case C library malloc() needs extra protection,
 * allow these defines to be overridden.
 */
#ifndef mem_free
#define mem_free free
#endif
#ifndef mem_malloc
#define mem_malloc malloc
#endif
#ifndef mem_calloc
#define mem_calloc calloc
#endif
/* Since there is no C library allocation function to shrink memory without
   moving it, define this to nothing. */
#ifndef mem_trim
#define mem_trim(mem, size) (mem)
#endif
#else /* MEM_LIBC_MALLOC */

/* MEM_SIZE would have to be aligned, but using 64000 here instead of
 * 65535 leaves some room for alignment...
 */
#if MEM_SIZE > 64000L
typedef u32_t mem_size_t;
#define MEM_SIZE_F U32_F
#else
typedef u16_t mem_size_t;
#define MEM_SIZE_F U16_F
#endif /* MEM_SIZE > 64000 */

#if MEM_USE_POOLS
/** mem_init is not used when using pools instead of a heap */
#define mem_init()
/** mem_trim is not used when using pools instead of a heap:
    we can't free part of a pool element and don't want to copy the rest */
#define mem_trim(mem, size) (mem)
#else /* MEM_USE_POOLS */
/* lwIP alternative malloc */
void  mem_init(void);
void *mem_trim(void *mem, mem_size_t size);
#endif /* MEM_USE_POOLS */
void *mem_malloc(mem_size_t size);
void *mem_calloc(mem_size_t count, mem_size_t size);
void  mem_free(void *mem);
#endif /* MEM_LIBC_MALLOC */

/** Calculate memory size for an aligned buffer - returns the next highest
 * multiple of MEM_ALIGNMENT (e.g. LWIP_MEM_ALIGN_SIZE(3) and
 * LWIP_MEM_ALIGN_SIZE(4) will both yield 4 for MEM_ALIGNMENT == 4).
 */
#ifndef LWIP_MEM_ALIGN_SIZE
#define LWIP_MEM_ALIGN_SIZE(size) (((size) + MEM_ALIGNMENT - 1) & ~(MEM_ALIGNMENT-1))
#endif

/** Calculate safe memory size for an aligned buffer when using an unaligned
 * type as storage. This includes a safety margin of (MEM_ALIGNMENT - 1) bytes at the
 * start (e.g. if buffer is u8_t[] and actual data will be u32_t*)
 */
#ifndef LWIP_MEM_ALIGN_BUFFER
#define LWIP_MEM_ALIGN_BUFFER(size) (((size) + MEM_ALIGNMENT - 1))
#endif

/** Align a memory pointer to the alignment defined by MEM_ALIGNMENT
 * so that ADDR % MEM_ALIGNMENT == 0
 */
#ifndef LWIP_MEM_ALIGN
#define LWIP_MEM_ALIGN(addr) ((void *)(((mem_ptr_t)(addr) + MEM_ALIGNMENT - 1) & ~(mem_ptr_t)(MEM_ALIGNMENT-1)))
#endif

#ifdef __cplusplus
}
#endif

#endif /* __LWIP_MEM_H__ */
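
/* Usage sketch (illustrative only, not part of this header's API; it assumes
 * MEM_ALIGNMENT, u8_t, u32_t and mem_ptr_t are provided by the port through
 * lwipopts.h / arch/cc.h, and that the lwIP heap, memory pools, or the C
 * library backs mem_malloc() depending on MEM_LIBC_MALLOC / MEM_USE_POOLS):
 *
 *   u8_t raw[LWIP_MEM_ALIGN_BUFFER(128)];           // 128 bytes plus (MEM_ALIGNMENT - 1) margin
 *   u32_t *buf = (u32_t *)LWIP_MEM_ALIGN(raw);      // start address rounded up to MEM_ALIGNMENT
 *
 *   void *p = mem_malloc(LWIP_MEM_ALIGN_SIZE(100)); // request rounded up to a MEM_ALIGNMENT multiple
 *   if (p != NULL) {
 *     p = mem_trim(p, 64);                          // shrink in place; a no-op for libc/pool builds
 *     mem_free(p);
 *   }
 */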