2010-08-26 18:50:56 +00:00
|
|
|
/*
|
|
|
|
* CDDL HEADER START
|
|
|
|
*
|
|
|
|
* The contents of this file are subject to the terms of the
|
|
|
|
* Common Development and Distribution License, Version 1.0 only
|
|
|
|
* (the "License"). You may not use this file except in compliance
|
|
|
|
* with the License.
|
|
|
|
*
|
|
|
|
* You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
|
2022-07-11 21:16:13 +00:00
|
|
|
* or https://opensource.org/licenses/CDDL-1.0.
|
2010-08-26 18:50:56 +00:00
|
|
|
* See the License for the specific language governing permissions
|
|
|
|
* and limitations under the License.
|
|
|
|
*
|
|
|
|
* When distributing Covered Code, include this CDDL HEADER in each
|
|
|
|
* file and include the License file at usr/src/OPENSOLARIS.LICENSE.
|
|
|
|
* If applicable, add the following below this CDDL HEADER, with the
|
|
|
|
* fields enclosed by brackets "[]" replaced with your own identifying
|
|
|
|
* information: Portions Copyright [yyyy] [name of copyright owner]
|
|
|
|
*
|
|
|
|
* CDDL HEADER END
|
|
|
|
*/
|
|
|
|
/*
|
|
|
|
* Copyright 2008 Sun Microsystems, Inc. All rights reserved.
|
|
|
|
* Use is subject to license terms.
|
|
|
|
*/
|
|
|
|
|
|
|
|
#ifndef _LIBSPL_UMEM_H
|
2013-11-01 19:26:11 +00:00
|
|
|
#define _LIBSPL_UMEM_H
|
2010-08-26 18:50:56 +00:00
|
|
|
|
2013-11-01 19:26:11 +00:00
|
|
|
/*
|
|
|
|
* XXX: We should use the real portable umem library if it is detected
|
2013-03-30 02:27:50 +00:00
|
|
|
* at configure time. However, if the library is not available, we can
|
2010-08-26 18:50:56 +00:00
|
|
|
* use a trivial malloc based implementation. This obviously impacts
|
2013-03-30 02:27:50 +00:00
|
|
|
* performance, but unless you are using a full userspace build of zpool for
|
|
|
|
* something other than ztest, you are likely not going to notice or care.
|
2010-08-26 18:50:56 +00:00
|
|
|
*
|
|
|
|
* https://labs.omniti.com/trac/portableumem
|
|
|
|
*/
|
2020-07-24 00:41:48 +00:00
|
|
|
#include <sys/debug.h>
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
|
2010-08-26 18:50:56 +00:00
|
|
|
|
|
|
|
#ifdef __cplusplus
|
|
|
|
extern "C" {
|
|
|
|
#endif
|
|
|
|
|
|
|
|
/* Opaque arena handle; only stored (as cache_arena), never dereferenced here */
typedef void vmem_t;
|
|
|
|
|
|
|
|
/*
 * Flags for umem_alloc/umem_free
 */
#define	UMEM_DEFAULT	0x0000	/* normal -- may fail */
#define	UMEM_NOFAIL	0x0100	/* Never fails */

/*
 * Flags for umem_cache_create()
 */
#define	UMC_NODEBUG	0x00020000	/* stored in cache_cflags; no-op here */

/* Maximum cache name length, excluding the terminating NUL */
#define	UMEM_CACHE_NAMELEN	31
|
2010-08-26 18:50:56 +00:00
|
|
|
|
|
|
|
/* Callback registered via umem_nofail_callback() (unused by this shim) */
typedef int umem_nofail_callback_t(void);
/* Object constructor: (buf, cache_private, flags) */
typedef int umem_constructor_t(void *, void *, int);
/* Object destructor: (buf, cache_private) */
typedef void umem_destructor_t(void *, void *);
/* Cache reclaim callback: (cache_private); never invoked by this shim */
typedef void umem_reclaim_t(void *);
|
|
|
|
|
|
|
|
/*
 * Cache descriptor.  This userspace shim only records the parameters
 * given to umem_cache_create(); objects are allocated individually in
 * umem_cache_alloc() rather than from a real slab cache.
 */
typedef struct umem_cache {
	char cache_name[UMEM_CACHE_NAMELEN + 1];	/* NUL-terminated name */
	size_t cache_bufsize;			/* size of each object */
	size_t cache_align;			/* alignment; 0 = none required */
	umem_constructor_t *cache_constructor;	/* run on each allocation */
	umem_destructor_t *cache_destructor;	/* run on each free */
	umem_reclaim_t *cache_reclaim;		/* stored, never called here */
	void *cache_private;			/* opaque arg to ctor/dtor */
	void *cache_arena;			/* vmem source; stored only */
	int cache_cflags;			/* UMC_* flags; stored only */
} umem_cache_t;
|
|
|
|
|
2020-06-11 20:38:25 +00:00
|
|
|
/*
 * Prototypes for functions to provide defaults for umem envvars.
 * Each returns the default setting string; definitions live in the
 * consumers (e.g. zdb/ztest), not in this header.
 */
const char *_umem_debug_init(void);
const char *_umem_options_init(void);
const char *_umem_logging_init(void);
|
|
|
|
|
Reduce false positives from Static Analyzers
Both Clang's Static Analyzer and Synopsys' Coverity would ignore
assertions. Following Clang's advice, we annotate our assertions:
https://clang-analyzer.llvm.org/annotations.html#custom_assertions
This makes both Clang's Static Analyzer and Coverity properly identify
assertions. This change reduced Clang's reported defects from 246 to
180. It also reduced the false positives reported by Coverityi by 10,
while enabling Coverity to find 9 more defects that previously were
false negatives.
A couple examples of this would be CID-1524417 and CID-1524423. After
submitting a build to coverity with the modified assertions, CID-1524417
disappeared while the report for CID-1524423 no longer claimed that the
assertion tripped.
Coincidentally, it turns out that it is possible to more accurately
annotate our headers than the Coverity modelling file permits in the
case of format strings. Since we can do that and this patch annotates
headers whenever `__coverity_panic__()` would have been used in the
model file, we drop all models that use `__coverity_panic__()` from the
model file.
Upon seeing the success in eliminating false positives involving
assertions, it occurred to me that we could also modify our headers to
eliminate coverity's false positives involving byte swaps. We now have
coverity specific byteswap macros, that do nothing, to disable
Coverity's false positives when we do byte swaps. This allowed us to
also drop the byteswap definitions from the model file.
Lastly, a model file update has been done beyond the mentioned
deletions:
* The definitions of `umem_alloc_aligned()`, `umem_alloc()` andi
`umem_zalloc()` were originally implemented in a way that was
intended to inform coverity that when KM_SLEEP has been passed these
functions, they do not return NULL. A small error in how this was
done was found, so we correct it.
* Definitions for umem_cache_alloc() and umem_cache_free() have been
added.
In practice, no false positives were avoided by making these changes,
but in the interest of correctness from future coverity builds, we make
them anyway.
Reviewed-by: Brian Behlendorf <behlendorf1@llnl.gov>
Reviewed-by: Ryan Moeller <ryan@iXsystems.com>
Signed-off-by: Richard Yao <richard.yao@alumni.stonybrook.edu>
Closes #13902
2022-09-30 22:30:12 +00:00
|
|
|
__attribute__((alloc_size(1)))
|
2010-08-26 18:50:56 +00:00
|
|
|
static inline void *
|
|
|
|
umem_alloc(size_t size, int flags)
|
|
|
|
{
|
2014-05-21 09:17:23 +00:00
|
|
|
void *ptr = NULL;
|
2010-08-26 18:50:56 +00:00
|
|
|
|
|
|
|
do {
|
|
|
|
ptr = malloc(size);
|
|
|
|
} while (ptr == NULL && (flags & UMEM_NOFAIL));
|
|
|
|
|
2013-11-01 19:26:11 +00:00
|
|
|
return (ptr);
|
2010-08-26 18:50:56 +00:00
|
|
|
}
|
|
|
|
|
Reduce false positives from Static Analyzers
Both Clang's Static Analyzer and Synopsys' Coverity would ignore
assertions. Following Clang's advice, we annotate our assertions:
https://clang-analyzer.llvm.org/annotations.html#custom_assertions
This makes both Clang's Static Analyzer and Coverity properly identify
assertions. This change reduced Clang's reported defects from 246 to
180. It also reduced the false positives reported by Coverityi by 10,
while enabling Coverity to find 9 more defects that previously were
false negatives.
A couple examples of this would be CID-1524417 and CID-1524423. After
submitting a build to coverity with the modified assertions, CID-1524417
disappeared while the report for CID-1524423 no longer claimed that the
assertion tripped.
Coincidentally, it turns out that it is possible to more accurately
annotate our headers than the Coverity modelling file permits in the
case of format strings. Since we can do that and this patch annotates
headers whenever `__coverity_panic__()` would have been used in the
model file, we drop all models that use `__coverity_panic__()` from the
model file.
Upon seeing the success in eliminating false positives involving
assertions, it occurred to me that we could also modify our headers to
eliminate coverity's false positives involving byte swaps. We now have
coverity specific byteswap macros, that do nothing, to disable
Coverity's false positives when we do byte swaps. This allowed us to
also drop the byteswap definitions from the model file.
Lastly, a model file update has been done beyond the mentioned
deletions:
* The definitions of `umem_alloc_aligned()`, `umem_alloc()` andi
`umem_zalloc()` were originally implemented in a way that was
intended to inform coverity that when KM_SLEEP has been passed these
functions, they do not return NULL. A small error in how this was
done was found, so we correct it.
* Definitions for umem_cache_alloc() and umem_cache_free() have been
added.
In practice, no false positives were avoided by making these changes,
but in the interest of correctness from future coverity builds, we make
them anyway.
Reviewed-by: Brian Behlendorf <behlendorf1@llnl.gov>
Reviewed-by: Ryan Moeller <ryan@iXsystems.com>
Signed-off-by: Richard Yao <richard.yao@alumni.stonybrook.edu>
Closes #13902
2022-09-30 22:30:12 +00:00
|
|
|
__attribute__((alloc_size(1)))
|
2010-08-26 18:50:56 +00:00
|
|
|
static inline void *
|
|
|
|
umem_alloc_aligned(size_t size, size_t align, int flags)
|
|
|
|
{
|
2014-05-21 09:17:23 +00:00
|
|
|
void *ptr = NULL;
|
|
|
|
int rc = EINVAL;
|
2010-08-26 18:50:56 +00:00
|
|
|
|
|
|
|
do {
|
|
|
|
rc = posix_memalign(&ptr, align, size);
|
|
|
|
} while (rc == ENOMEM && (flags & UMEM_NOFAIL));
|
|
|
|
|
|
|
|
if (rc == EINVAL) {
|
|
|
|
fprintf(stderr, "%s: invalid memory alignment (%zd)\n",
|
|
|
|
__func__, align);
|
|
|
|
if (flags & UMEM_NOFAIL)
|
|
|
|
abort();
|
2013-11-01 19:26:11 +00:00
|
|
|
return (NULL);
|
2010-08-26 18:50:56 +00:00
|
|
|
}
|
|
|
|
|
2013-11-01 19:26:11 +00:00
|
|
|
return (ptr);
|
2010-08-26 18:50:56 +00:00
|
|
|
}
|
|
|
|
|
Reduce false positives from Static Analyzers
Both Clang's Static Analyzer and Synopsys' Coverity would ignore
assertions. Following Clang's advice, we annotate our assertions:
https://clang-analyzer.llvm.org/annotations.html#custom_assertions
This makes both Clang's Static Analyzer and Coverity properly identify
assertions. This change reduced Clang's reported defects from 246 to
180. It also reduced the false positives reported by Coverityi by 10,
while enabling Coverity to find 9 more defects that previously were
false negatives.
A couple examples of this would be CID-1524417 and CID-1524423. After
submitting a build to coverity with the modified assertions, CID-1524417
disappeared while the report for CID-1524423 no longer claimed that the
assertion tripped.
Coincidentally, it turns out that it is possible to more accurately
annotate our headers than the Coverity modelling file permits in the
case of format strings. Since we can do that and this patch annotates
headers whenever `__coverity_panic__()` would have been used in the
model file, we drop all models that use `__coverity_panic__()` from the
model file.
Upon seeing the success in eliminating false positives involving
assertions, it occurred to me that we could also modify our headers to
eliminate coverity's false positives involving byte swaps. We now have
coverity specific byteswap macros, that do nothing, to disable
Coverity's false positives when we do byte swaps. This allowed us to
also drop the byteswap definitions from the model file.
Lastly, a model file update has been done beyond the mentioned
deletions:
* The definitions of `umem_alloc_aligned()`, `umem_alloc()` andi
`umem_zalloc()` were originally implemented in a way that was
intended to inform coverity that when KM_SLEEP has been passed these
functions, they do not return NULL. A small error in how this was
done was found, so we correct it.
* Definitions for umem_cache_alloc() and umem_cache_free() have been
added.
In practice, no false positives were avoided by making these changes,
but in the interest of correctness from future coverity builds, we make
them anyway.
Reviewed-by: Brian Behlendorf <behlendorf1@llnl.gov>
Reviewed-by: Ryan Moeller <ryan@iXsystems.com>
Signed-off-by: Richard Yao <richard.yao@alumni.stonybrook.edu>
Closes #13902
2022-09-30 22:30:12 +00:00
|
|
|
/*
 * Allocate size bytes and zero them.  Flags pass straight through to
 * umem_alloc(), so UMEM_NOFAIL retry behavior applies here as well.
 */
__attribute__((alloc_size(1)))
static inline void *
umem_zalloc(size_t size, int flags)
{
	void *buf = umem_alloc(size, flags);

	if (buf != NULL)
		memset(buf, 0, size);

	return (buf);
}
|
|
|
|
|
|
|
|
/*
 * Release memory from umem_alloc()/umem_zalloc().  The size argument
 * exists for libumem API compatibility and is ignored by this
 * malloc-backed implementation; the cast drops const for free().
 */
static inline void
umem_free(const void *ptr, size_t size __maybe_unused)
{
	free((void *)ptr);
}
|
|
|
|
|
2022-10-26 22:08:31 +00:00
|
|
|
/*
|
|
|
|
* umem_free_aligned was added for supporting portability
|
|
|
|
* with non-POSIX platforms that require a different free
|
|
|
|
* to be used with aligned allocations.
|
|
|
|
*/
|
|
|
|
static inline void
|
|
|
|
umem_free_aligned(void *ptr, size_t size __maybe_unused)
|
|
|
|
{
|
|
|
|
#ifndef _WIN32
|
|
|
|
free((void *)ptr);
|
|
|
|
#else
|
|
|
|
_aligned_free(ptr);
|
|
|
|
#endif
|
|
|
|
}
|
|
|
|
|
2010-08-26 18:50:56 +00:00
|
|
|
/*
 * No-op: this trivial implementation never invokes a nofail callback;
 * UMEM_NOFAIL allocations simply retry until they succeed.
 */
static inline void
umem_nofail_callback(umem_nofail_callback_t *cb __maybe_unused)
{}
|
2010-08-26 18:50:56 +00:00
|
|
|
|
|
|
|
static inline umem_cache_t *
|
2013-11-01 19:26:11 +00:00
|
|
|
umem_cache_create(
|
2022-04-19 18:38:30 +00:00
|
|
|
const char *name, size_t bufsize, size_t align,
|
2013-11-01 19:26:11 +00:00
|
|
|
umem_constructor_t *constructor,
|
|
|
|
umem_destructor_t *destructor,
|
|
|
|
umem_reclaim_t *reclaim,
|
|
|
|
void *priv, void *vmp, int cflags)
|
2010-08-26 18:50:56 +00:00
|
|
|
{
|
|
|
|
umem_cache_t *cp;
|
|
|
|
|
2020-06-06 19:54:04 +00:00
|
|
|
cp = (umem_cache_t *)umem_alloc(sizeof (umem_cache_t), UMEM_DEFAULT);
|
2010-08-26 18:50:56 +00:00
|
|
|
if (cp) {
|
2018-04-04 17:16:47 +00:00
|
|
|
strlcpy(cp->cache_name, name, UMEM_CACHE_NAMELEN);
|
2010-08-26 18:50:56 +00:00
|
|
|
cp->cache_bufsize = bufsize;
|
|
|
|
cp->cache_align = align;
|
|
|
|
cp->cache_constructor = constructor;
|
|
|
|
cp->cache_destructor = destructor;
|
|
|
|
cp->cache_reclaim = reclaim;
|
|
|
|
cp->cache_private = priv;
|
|
|
|
cp->cache_arena = vmp;
|
|
|
|
cp->cache_cflags = cflags;
|
|
|
|
}
|
|
|
|
|
2013-11-01 19:26:11 +00:00
|
|
|
return (cp);
|
2010-08-26 18:50:56 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
static inline void
|
|
|
|
umem_cache_destroy(umem_cache_t *cp)
|
|
|
|
{
|
2013-11-01 19:26:11 +00:00
|
|
|
umem_free(cp, sizeof (umem_cache_t));
|
2010-08-26 18:50:56 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
static inline void *
|
|
|
|
umem_cache_alloc(umem_cache_t *cp, int flags)
|
|
|
|
{
|
2014-05-21 09:17:23 +00:00
|
|
|
void *ptr = NULL;
|
2010-08-26 18:50:56 +00:00
|
|
|
|
|
|
|
if (cp->cache_align != 0)
|
2013-11-01 19:26:11 +00:00
|
|
|
ptr = umem_alloc_aligned(
|
|
|
|
cp->cache_bufsize, cp->cache_align, flags);
|
2010-08-26 18:50:56 +00:00
|
|
|
else
|
|
|
|
ptr = umem_alloc(cp->cache_bufsize, flags);
|
|
|
|
|
|
|
|
if (ptr && cp->cache_constructor)
|
|
|
|
cp->cache_constructor(ptr, cp->cache_private, UMEM_DEFAULT);
|
|
|
|
|
2013-11-01 19:26:11 +00:00
|
|
|
return (ptr);
|
2010-08-26 18:50:56 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
static inline void
|
|
|
|
umem_cache_free(umem_cache_t *cp, void *ptr)
|
|
|
|
{
|
|
|
|
if (cp->cache_destructor)
|
|
|
|
cp->cache_destructor(ptr, cp->cache_private);
|
|
|
|
|
2022-10-26 22:08:31 +00:00
|
|
|
if (cp->cache_align != 0)
|
|
|
|
umem_free_aligned(ptr, cp->cache_bufsize);
|
|
|
|
else
|
|
|
|
umem_free(ptr, cp->cache_bufsize);
|
2010-08-26 18:50:56 +00:00
|
|
|
}
|
|
|
|
|
2015-06-24 22:48:22 +00:00
|
|
|
/*
 * No-op: the malloc-backed implementation holds no cached objects to
 * reap.
 */
static inline void
umem_cache_reap_now(umem_cache_t *cp __maybe_unused)
{
}
|
|
|
|
|
2010-08-26 18:50:56 +00:00
|
|
|
#ifdef __cplusplus
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#endif
|