2017-10-04 16:33:43 +00:00
|
|
|
/*
|
2010-05-17 22:18:00 +00:00
|
|
|
* Copyright (C) 2007-2010 Lawrence Livermore National Security, LLC.
|
|
|
|
* Copyright (C) 2007 The Regents of the University of California.
|
|
|
|
* Produced at Lawrence Livermore National Laboratory (cf, DISCLAIMER).
|
|
|
|
* Written by Brian Behlendorf <behlendorf1@llnl.gov>.
|
|
|
|
* UCRL-CODE-235197
|
|
|
|
*
|
|
|
|
* This file is part of the SPL, Solaris Porting Layer.
|
|
|
|
*
|
|
|
|
* The SPL is free software; you can redistribute it and/or modify it
|
|
|
|
* under the terms of the GNU General Public License as published by the
|
|
|
|
* Free Software Foundation; either version 2 of the License, or (at your
|
|
|
|
* option) any later version.
|
|
|
|
*
|
|
|
|
* The SPL is distributed in the hope that it will be useful, but WITHOUT
|
|
|
|
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
|
|
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
|
|
* for more details.
|
|
|
|
*
|
|
|
|
* You should have received a copy of the GNU General Public License along
|
|
|
|
* with the SPL. If not, see <http://www.gnu.org/licenses/>.
|
2017-10-04 16:33:43 +00:00
|
|
|
*/
|
2010-05-17 22:18:00 +00:00
|
|
|
|
2008-03-12 23:48:28 +00:00
|
|
|
/*
 * Byte-order support for the SPL (Solaris Porting Layer): Solaris-style
 * byteswap macros and 64-bit host/network-order helpers layered on top of
 * the Linux kernel's byte-order primitives.
 */
#ifndef _SPL_BYTEORDER_H
#define _SPL_BYTEORDER_H

#include <asm/byteorder.h>

/*
 * Map the kernel's endianness marker onto the _ZFS_*_ENDIAN macros that
 * <sys/isa_defs.h> and the rest of the ZFS code base test for.
 * NOTE(review): this relies on <asm/byteorder.h> defining exactly one of
 * __BIG_ENDIAN / __LITTLE_ENDIAN (kernel semantics — unlike userspace
 * <endian.h>, where both are defined as numeric constants) — confirm.
 */
#if defined(__BIG_ENDIAN) && !defined(_ZFS_BIG_ENDIAN)
#define _ZFS_BIG_ENDIAN
#endif

#if defined(__LITTLE_ENDIAN) && !defined(_ZFS_LITTLE_ENDIAN)
#define _ZFS_LITTLE_ENDIAN
#endif

#include <sys/isa_defs.h>
Reduce false positives from Static Analyzers
Both Clang's Static Analyzer and Synopsys' Coverity would ignore
assertions. Following Clang's advice, we annotate our assertions:
https://clang-analyzer.llvm.org/annotations.html#custom_assertions
This makes both Clang's Static Analyzer and Coverity properly identify
assertions. This change reduced Clang's reported defects from 246 to
180. It also reduced the false positives reported by Coverityi by 10,
while enabling Coverity to find 9 more defects that previously were
false negatives.
A couple examples of this would be CID-1524417 and CID-1524423. After
submitting a build to coverity with the modified assertions, CID-1524417
disappeared while the report for CID-1524423 no longer claimed that the
assertion tripped.
Coincidentally, it turns out that it is possible to more accurately
annotate our headers than the Coverity modelling file permits in the
case of format strings. Since we can do that and this patch annotates
headers whenever `__coverity_panic__()` would have been used in the
model file, we drop all models that use `__coverity_panic__()` from the
model file.
Upon seeing the success in eliminating false positives involving
assertions, it occurred to me that we could also modify our headers to
eliminate coverity's false positives involving byte swaps. We now have
coverity specific byteswap macros, that do nothing, to disable
Coverity's false positives when we do byte swaps. This allowed us to
also drop the byteswap definitions from the model file.
Lastly, a model file update has been done beyond the mentioned
deletions:
* The definitions of `umem_alloc_aligned()`, `umem_alloc()` andi
`umem_zalloc()` were originally implemented in a way that was
intended to inform coverity that when KM_SLEEP has been passed these
functions, they do not return NULL. A small error in how this was
done was found, so we correct it.
* Definitions for umem_cache_alloc() and umem_cache_free() have been
added.
In practice, no false positives were avoided by making these changes,
but in the interest of correctness from future coverity builds, we make
them anyway.
Reviewed-by: Brian Behlendorf <behlendorf1@llnl.gov>
Reviewed-by: Ryan Moeller <ryan@iXsystems.com>
Signed-off-by: Richard Yao <richard.yao@alumni.stonybrook.edu>
Closes #13902
2022-09-30 22:30:12 +00:00
|
|
|
#ifdef __COVERITY__
/*
 * Coverity's taint warnings from byteswapping are false positives for us.
 * Suppress them by hiding byteswapping from Coverity: these stubs keep the
 * operand's low bits and reorder nothing.
 */
#define BSWAP_8(x) ((x) & 0xff)
#define BSWAP_16(x) ((x) & 0xffff)
#define BSWAP_32(x) ((x) & 0xffffffff)
#define BSWAP_64(x) (x)

#else /* __COVERITY__ */

/*
 * Unconditional byte swaps, built up recursively from the 8-bit case.
 * NOTE(review): each argument is evaluated more than once, so x must be
 * side-effect free; and the operand must be at least as wide as the
 * macro's nominal size (BSWAP_64 shifts by 32, which is undefined for a
 * 32-bit operand) — callers are expected to pass suitably wide values.
 */
#define BSWAP_8(x) ((x) & 0xff)
#define BSWAP_16(x) ((BSWAP_8(x) << 8) | BSWAP_8((x) >> 8))
#define BSWAP_32(x) ((BSWAP_16(x) << 16) | BSWAP_16((x) >> 16))
#define BSWAP_64(x) ((BSWAP_32(x) << 32) | BSWAP_32((x) >> 32))

#endif /* __COVERITY__ */
|
|
|
|
|
2018-02-07 19:49:38 +00:00
|
|
|
/*
 * Solaris-style host-to-little-endian / host-to-big-endian conversions,
 * implemented with the Linux kernel's cpu_to_*() helpers pulled in via
 * <asm/byteorder.h>.
 */
#define LE_16(x) cpu_to_le16(x)
#define LE_32(x) cpu_to_le32(x)
#define LE_64(x) cpu_to_le64(x)
#define BE_16(x) cpu_to_be16(x)
#define BE_32(x) cpu_to_be32(x)
#define BE_64(x) cpu_to_be64(x)
|
2008-03-12 23:48:28 +00:00
|
|
|
|
2018-02-07 19:49:38 +00:00
|
|
|
/*
 * Read a big-endian value of the given width from an arbitrary (possibly
 * unaligned) byte address xa, assembling it one byte at a time.
 * NOTE: xa is evaluated more than once in BE_IN16/BE_IN32, so it must be
 * an expression without side effects.
 */
#define BE_IN8(xa) \
	*((uint8_t *)(xa))

#define BE_IN16(xa) \
	(((uint16_t)BE_IN8(xa) << 8) | BE_IN8((uint8_t *)(xa)+1))

#define BE_IN32(xa) \
	(((uint32_t)BE_IN16(xa) << 16) | BE_IN16((uint8_t *)(xa)+2))
|
|
|
|
|
2020-07-28 20:02:49 +00:00
|
|
|
#ifdef _ZFS_BIG_ENDIAN
/*
 * On big-endian hosts the in-memory layout already matches network byte
 * order, so the 64-bit conversions are identity functions.
 */
static __inline__ uint64_t
htonll(uint64_t n)
{
	return (n);
}

static __inline__ uint64_t
ntohll(uint64_t n)
{
	return (n);
}
#else
/*
 * On little-endian hosts, byteswap each 32-bit half with htonl()/ntohl()
 * and exchange the halves: the (swapped) low half becomes the high half
 * and vice versa.
 */
static __inline__ uint64_t
htonll(uint64_t n)
{
	uint64_t swapped_lo = htonl(n);
	uint64_t swapped_hi = htonl(n >> 32);

	return ((swapped_lo << 32) + swapped_hi);
}

static __inline__ uint64_t
ntohll(uint64_t n)
{
	uint64_t swapped_lo = ntohl(n);
	uint64_t swapped_hi = ntohl(n >> 32);

	return ((swapped_lo << 32) + swapped_hi);
}
#endif
|
|
|
|
|
2008-03-12 23:48:28 +00:00
|
|
|
#endif /* _SPL_BYTEORDER_H */
|