/**************************************************************************
 *
 * Copyright 2008-2010 VMware, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/


/*
 * Memory alignment wrappers.
 */


#ifndef _OS_MEMORY_H_
#error "Must not be included directly. Include os_memory.h instead"
#endif


#include "pipe/p_compiler.h"


/**
 * Add two size_t values with integer overflow check.
 *
 * Uses the compiler's __builtin_add_overflow when available (GCC >= 5
 * and Clang); otherwise falls back to the unsigned-wraparound test.
 *
 * \param a    first addend.
 * \param b    second addend.
 * \param res  receives a + b (the wrapped value when overflow occurs).
 * \return true if the addition overflowed, false otherwise.
 */
static inline bool
add_overflow_size_t(size_t a, size_t b, size_t *res)
{
#if defined(__GNUC__) && (__GNUC__ >= 5 || defined(__clang__))
   return __builtin_add_overflow(a, b, res);
#else
   /* size_t addition wraps modulo SIZE_MAX+1, so overflow happened
    * exactly when the sum is smaller than either operand; checking
    * against one operand is sufficient.
    */
   *res = a + b;
   return *res < a;
#endif
}
53b8e80941Smrg
54b8e80941Smrg
/**
 * Allocate memory whose start address is aligned to the given byte
 * boundary.
 *
 * A pointer to the underlying (unaligned) allocation is stashed in the
 * pointer-sized slot immediately before the returned address, so that
 * os_free_aligned() can recover and release it.
 *
 * \param size       number of usable bytes requested.
 * \param alignment  required alignment in bytes; assumed to be a power
 *                   of two (the rounding mask relies on this).
 * \return aligned pointer, or NULL on arithmetic overflow or
 *         allocation failure.
 */
static inline void *
os_malloc_aligned(size_t size, size_t alignment)
{
   size_t total;
   char *base;
   uintptr_t aligned;

   /* total = size + alignment + sizeof(void *), rejecting overflow
    * at each step.
    */
   if (add_overflow_size_t(size, alignment, &total))
      return NULL;
   if (add_overflow_size_t(total, sizeof(void *), &total))
      return NULL;

   base = (char *) os_malloc(total);
   if (base == NULL)
      return NULL;

   /* Skip past the cubbyhole slot, then round up to the next
    * alignment boundary.
    */
   aligned = ((uintptr_t) base + sizeof(void *) + alignment - 1) &
             ~((uintptr_t) (alignment - 1));

   /* Remember the real allocation just below the aligned address. */
   ((char **) aligned)[-1] = base;

   return (void *) aligned;
}
85b8e80941Smrg
86b8e80941Smrg
/**
 * Free memory returned by os_malloc_aligned().
 *
 * Recovers the original allocation pointer stored in the pointer-sized
 * slot just before \p ptr and hands it to os_free(). A NULL argument
 * is accepted and ignored.
 */
static inline void
os_free_aligned(void *ptr)
{
   void *realAddr;

   if (ptr == NULL)
      return;

   /* The underlying allocation address sits one pointer-slot below. */
   realAddr = ((void **) ptr)[-1];
   os_free(realAddr);
}
99