diff --git a/cmd/arc_summary/Makefile.am b/cmd/arc_summary/Makefile.am index 1a26c2c199..f419f07e0e 100644 --- a/cmd/arc_summary/Makefile.am +++ b/cmd/arc_summary/Makefile.am @@ -1,13 +1,8 @@ bin_SCRIPTS = arc_summary CLEANFILES = arc_summary -EXTRA_DIST = arc_summary2 arc_summary3 - -if USING_PYTHON_2 -SCRIPT = arc_summary2 -else +EXTRA_DIST = arc_summary3 SCRIPT = arc_summary3 -endif arc_summary: $(SCRIPT) cp $< $@ diff --git a/cmd/arc_summary/arc_summary2 b/cmd/arc_summary/arc_summary2 deleted file mode 100755 index 3302a802d1..0000000000 --- a/cmd/arc_summary/arc_summary2 +++ /dev/null @@ -1,1180 +0,0 @@ -#!/usr/bin/env python2 -# -# $Id: arc_summary.pl,v 388:e27800740aa2 2011-07-08 02:53:29Z jhell $ -# -# Copyright (c) 2008 Ben Rockwood , -# Copyright (c) 2010 Martin Matuska , -# Copyright (c) 2010-2011 Jason J. Hellenthal , -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# If you are having troubles when using this script from cron(8) please try -# adjusting your PATH before reporting problems. -# -# Note some of this code uses older code (eg getopt instead of argparse, -# subprocess.Popen() instead of subprocess.run()) because we need to support -# some very old versions of Python. -# - -"""Print statistics on the ZFS Adjustable Replacement Cache (ARC) - -Provides basic information on the ARC, its efficiency, the L2ARC (if present), -the Data Management Unit (DMU), Virtual Devices (VDEVs), and tunables. See the -in-source documentation and code at -https://github.com/openzfs/zfs/blob/master/module/zfs/arc.c for details. -""" - -import getopt -import os -import sys -import time -import errno - -from subprocess import Popen, PIPE -from decimal import Decimal as D - - -if sys.platform.startswith('freebsd'): - # Requires py27-sysctl on FreeBSD - import sysctl - - def is_value(ctl): - return ctl.type != sysctl.CTLTYPE_NODE - - def load_kstats(namespace): - """Collect information on a specific subsystem of the ARC""" - - base = 'kstat.zfs.misc.%s.' 
% namespace - fmt = lambda kstat: (kstat.name, D(kstat.value)) - kstats = sysctl.filter(base) - return [fmt(kstat) for kstat in kstats if is_value(kstat)] - - def load_tunables(): - ctls = sysctl.filter('vfs.zfs') - return dict((ctl.name, ctl.value) for ctl in ctls if is_value(ctl)) - -elif sys.platform.startswith('linux'): - - def load_kstats(namespace): - """Collect information on a specific subsystem of the ARC""" - - kstat = 'kstat.zfs.misc.%s.%%s' % namespace - path = '/proc/spl/kstat/zfs/%s' % namespace - with open(path) as f: - entries = [line.strip().split() for line in f][2:] # Skip header - return [(kstat % name, D(value)) for name, _, value in entries] - - def load_tunables(): - basepath = '/sys/module/zfs/parameters' - tunables = {} - for name in os.listdir(basepath): - if not name: - continue - path = '%s/%s' % (basepath, name) - with open(path) as f: - value = f.read() - tunables[name] = value.strip() - return tunables - - -show_tunable_descriptions = False -alternate_tunable_layout = False - - -def get_Kstat(): - """Collect information on the ZFS subsystem from the /proc virtual - file system. The name "kstat" is a holdover from the Solaris utility - of the same name. - """ - - Kstat = {} - Kstat.update(load_kstats('arcstats')) - Kstat.update(load_kstats('zfetchstats')) - Kstat.update(load_kstats('vdev_cache_stats')) - return Kstat - - -def fBytes(b=0): - """Return human-readable representation of a byte value in - powers of 2 (eg "KiB" for "kibibytes", etc) to two decimal - points. Values smaller than one KiB are returned without - decimal points. - """ - - prefixes = [ - [2**80, "YiB"], # yobibytes (yotta) - [2**70, "ZiB"], # zebibytes (zetta) - [2**60, "EiB"], # exbibytes (exa) - [2**50, "PiB"], # pebibytes (peta) - [2**40, "TiB"], # tebibytes (tera) - [2**30, "GiB"], # gibibytes (giga) - [2**20, "MiB"], # mebibytes (mega) - [2**10, "KiB"]] # kibibytes (kilo) - - if b >= 2**10: - - for limit, unit in prefixes: - - if b >= limit: - value = b / limit - break - - result = "%0.2f\t%s" % (value, unit) - - else: - - result = "%d\tBytes" % b - - return result - - -def fHits(hits=0): - """Create a human-readable representation of the number of hits. - The single-letter symbols used are SI to avoid the confusion caused - by the different "short scale" and "long scale" representations in - English, which use the same words for different values. See - https://en.wikipedia.org/wiki/Names_of_large_numbers and - https://physics.nist.gov/cuu/Units/prefixes.html - """ - - numbers = [ - [10**24, 'Y'], # yotta (septillion) - [10**21, 'Z'], # zetta (sextillion) - [10**18, 'E'], # exa (quintrillion) - [10**15, 'P'], # peta (quadrillion) - [10**12, 'T'], # tera (trillion) - [10**9, 'G'], # giga (billion) - [10**6, 'M'], # mega (million) - [10**3, 'k']] # kilo (thousand) - - if hits >= 1000: - - for limit, symbol in numbers: - - if hits >= limit: - value = hits/limit - break - - result = "%0.2f%s" % (value, symbol) - - else: - - result = "%d" % hits - - return result - - -def fPerc(lVal=0, rVal=0, Decimal=2): - """Calculate percentage value and return in human-readable format""" - - if rVal > 0: - return str("%0." + str(Decimal) + "f") % (100 * (lVal / rVal)) + "%" - else: - return str("%0." 
+ str(Decimal) + "f") % 100 + "%" - - -def get_arc_summary(Kstat): - """Collect general data on the ARC""" - - output = {} - memory_throttle_count = Kstat[ - "kstat.zfs.misc.arcstats.memory_throttle_count" - ] - - if memory_throttle_count > 0: - output['health'] = 'THROTTLED' - else: - output['health'] = 'HEALTHY' - - output['memory_throttle_count'] = fHits(memory_throttle_count) - - # ARC Misc. - deleted = Kstat["kstat.zfs.misc.arcstats.deleted"] - mutex_miss = Kstat["kstat.zfs.misc.arcstats.mutex_miss"] - evict_skip = Kstat["kstat.zfs.misc.arcstats.evict_skip"] - evict_l2_cached = Kstat["kstat.zfs.misc.arcstats.evict_l2_cached"] - evict_l2_eligible = Kstat["kstat.zfs.misc.arcstats.evict_l2_eligible"] - evict_l2_eligible_mfu = Kstat["kstat.zfs.misc.arcstats.evict_l2_eligible_mfu"] - evict_l2_eligible_mru = Kstat["kstat.zfs.misc.arcstats.evict_l2_eligible_mru"] - evict_l2_ineligible = Kstat["kstat.zfs.misc.arcstats.evict_l2_ineligible"] - evict_l2_skip = Kstat["kstat.zfs.misc.arcstats.evict_l2_skip"] - - # ARC Misc. - output["arc_misc"] = {} - output["arc_misc"]["deleted"] = fHits(deleted) - output["arc_misc"]["mutex_miss"] = fHits(mutex_miss) - output["arc_misc"]["evict_skips"] = fHits(evict_skip) - output["arc_misc"]["evict_l2_skip"] = fHits(evict_l2_skip) - output["arc_misc"]["evict_l2_cached"] = fBytes(evict_l2_cached) - output["arc_misc"]["evict_l2_eligible"] = fBytes(evict_l2_eligible) - output["arc_misc"]["evict_l2_eligible_mfu"] = { - 'per': fPerc(evict_l2_eligible_mfu, evict_l2_eligible), - 'num': fBytes(evict_l2_eligible_mfu), - } - output["arc_misc"]["evict_l2_eligible_mru"] = { - 'per': fPerc(evict_l2_eligible_mru, evict_l2_eligible), - 'num': fBytes(evict_l2_eligible_mru), - } - output["arc_misc"]["evict_l2_ineligible"] = fBytes(evict_l2_ineligible) - - # ARC Sizing - arc_size = Kstat["kstat.zfs.misc.arcstats.size"] - mru_size = Kstat["kstat.zfs.misc.arcstats.mru_size"] - mfu_size = Kstat["kstat.zfs.misc.arcstats.mfu_size"] - meta_limit = Kstat["kstat.zfs.misc.arcstats.arc_meta_limit"] - meta_size = Kstat["kstat.zfs.misc.arcstats.arc_meta_used"] - dnode_limit = Kstat["kstat.zfs.misc.arcstats.arc_dnode_limit"] - dnode_size = Kstat["kstat.zfs.misc.arcstats.dnode_size"] - target_max_size = Kstat["kstat.zfs.misc.arcstats.c_max"] - target_min_size = Kstat["kstat.zfs.misc.arcstats.c_min"] - target_size = Kstat["kstat.zfs.misc.arcstats.c"] - - target_size_ratio = (target_max_size / target_min_size) - - # ARC Sizing - output['arc_sizing'] = {} - output['arc_sizing']['arc_size'] = { - 'per': fPerc(arc_size, target_max_size), - 'num': fBytes(arc_size), - } - output['arc_sizing']['target_max_size'] = { - 'ratio': target_size_ratio, - 'num': fBytes(target_max_size), - } - output['arc_sizing']['target_min_size'] = { - 'per': fPerc(target_min_size, target_max_size), - 'num': fBytes(target_min_size), - } - output['arc_sizing']['target_size'] = { - 'per': fPerc(target_size, target_max_size), - 'num': fBytes(target_size), - } - output['arc_sizing']['meta_limit'] = { - 'per': fPerc(meta_limit, target_max_size), - 'num': fBytes(meta_limit), - } - output['arc_sizing']['meta_size'] = { - 'per': fPerc(meta_size, meta_limit), - 'num': fBytes(meta_size), - } - output['arc_sizing']['dnode_limit'] = { - 'per': fPerc(dnode_limit, meta_limit), - 'num': fBytes(dnode_limit), - } - output['arc_sizing']['dnode_size'] = { - 'per': fPerc(dnode_size, dnode_limit), - 'num': fBytes(dnode_size), - } - - # ARC Hash Breakdown - output['arc_hash_break'] = {} - output['arc_hash_break']['hash_chain_max'] = Kstat[ - 
"kstat.zfs.misc.arcstats.hash_chain_max" - ] - output['arc_hash_break']['hash_chains'] = Kstat[ - "kstat.zfs.misc.arcstats.hash_chains" - ] - output['arc_hash_break']['hash_collisions'] = Kstat[ - "kstat.zfs.misc.arcstats.hash_collisions" - ] - output['arc_hash_break']['hash_elements'] = Kstat[ - "kstat.zfs.misc.arcstats.hash_elements" - ] - output['arc_hash_break']['hash_elements_max'] = Kstat[ - "kstat.zfs.misc.arcstats.hash_elements_max" - ] - - output['arc_size_break'] = {} - output['arc_size_break']['recently_used_cache_size'] = { - 'per': fPerc(mru_size, mru_size + mfu_size), - 'num': fBytes(mru_size), - } - output['arc_size_break']['frequently_used_cache_size'] = { - 'per': fPerc(mfu_size, mru_size + mfu_size), - 'num': fBytes(mfu_size), - } - - # ARC Hash Breakdown - hash_chain_max = Kstat["kstat.zfs.misc.arcstats.hash_chain_max"] - hash_chains = Kstat["kstat.zfs.misc.arcstats.hash_chains"] - hash_collisions = Kstat["kstat.zfs.misc.arcstats.hash_collisions"] - hash_elements = Kstat["kstat.zfs.misc.arcstats.hash_elements"] - hash_elements_max = Kstat["kstat.zfs.misc.arcstats.hash_elements_max"] - - output['arc_hash_break'] = {} - output['arc_hash_break']['elements_max'] = fHits(hash_elements_max) - output['arc_hash_break']['elements_current'] = { - 'per': fPerc(hash_elements, hash_elements_max), - 'num': fHits(hash_elements), - } - output['arc_hash_break']['collisions'] = fHits(hash_collisions) - output['arc_hash_break']['chain_max'] = fHits(hash_chain_max) - output['arc_hash_break']['chains'] = fHits(hash_chains) - - return output - - -def _arc_summary(Kstat): - """Print information on the ARC""" - - # ARC Sizing - arc = get_arc_summary(Kstat) - - sys.stdout.write("ARC Summary: (%s)\n" % arc['health']) - - sys.stdout.write("\tMemory Throttle Count:\t\t\t%s\n" % - arc['memory_throttle_count']) - sys.stdout.write("\n") - - # ARC Misc. 
- sys.stdout.write("ARC Misc:\n") - sys.stdout.write("\tDeleted:\t\t\t\t%s\n" % arc['arc_misc']['deleted']) - sys.stdout.write("\tMutex Misses:\t\t\t\t%s\n" % - arc['arc_misc']['mutex_miss']) - sys.stdout.write("\tEviction Skips:\t\t\t\t%s\n" % - arc['arc_misc']['evict_skips']) - sys.stdout.write("\tEviction Skips Due to L2 Writes:\t%s\n" % - arc['arc_misc']['evict_l2_skip']) - sys.stdout.write("\tL2 Cached Evictions:\t\t\t%s\n" % - arc['arc_misc']['evict_l2_cached']) - sys.stdout.write("\tL2 Eligible Evictions:\t\t\t%s\n" % - arc['arc_misc']['evict_l2_eligible']) - sys.stdout.write("\tL2 Eligible MFU Evictions:\t%s\t%s\n" % ( - arc['arc_misc']['evict_l2_eligible_mfu']['per'], - arc['arc_misc']['evict_l2_eligible_mfu']['num'], - ) - ) - sys.stdout.write("\tL2 Eligible MRU Evictions:\t%s\t%s\n" % ( - arc['arc_misc']['evict_l2_eligible_mru']['per'], - arc['arc_misc']['evict_l2_eligible_mru']['num'], - ) - ) - sys.stdout.write("\tL2 Ineligible Evictions:\t\t%s\n" % - arc['arc_misc']['evict_l2_ineligible']) - sys.stdout.write("\n") - - # ARC Sizing - sys.stdout.write("ARC Size:\t\t\t\t%s\t%s\n" % ( - arc['arc_sizing']['arc_size']['per'], - arc['arc_sizing']['arc_size']['num'] - ) - ) - sys.stdout.write("\tTarget Size: (Adaptive)\t\t%s\t%s\n" % ( - arc['arc_sizing']['target_size']['per'], - arc['arc_sizing']['target_size']['num'], - ) - ) - - sys.stdout.write("\tMin Size (Hard Limit):\t\t%s\t%s\n" % ( - arc['arc_sizing']['target_min_size']['per'], - arc['arc_sizing']['target_min_size']['num'], - ) - ) - - sys.stdout.write("\tMax Size (High Water):\t\t%d:1\t%s\n" % ( - arc['arc_sizing']['target_max_size']['ratio'], - arc['arc_sizing']['target_max_size']['num'], - ) - ) - - sys.stdout.write("\nARC Size Breakdown:\n") - sys.stdout.write("\tRecently Used Cache Size:\t%s\t%s\n" % ( - arc['arc_size_break']['recently_used_cache_size']['per'], - arc['arc_size_break']['recently_used_cache_size']['num'], - ) - ) - sys.stdout.write("\tFrequently Used Cache Size:\t%s\t%s\n" % ( - arc['arc_size_break']['frequently_used_cache_size']['per'], - arc['arc_size_break']['frequently_used_cache_size']['num'], - ) - ) - sys.stdout.write("\tMetadata Size (Hard Limit):\t%s\t%s\n" % ( - arc['arc_sizing']['meta_limit']['per'], - arc['arc_sizing']['meta_limit']['num'], - ) - ) - sys.stdout.write("\tMetadata Size:\t\t\t%s\t%s\n" % ( - arc['arc_sizing']['meta_size']['per'], - arc['arc_sizing']['meta_size']['num'], - ) - ) - sys.stdout.write("\tDnode Size (Hard Limit):\t%s\t%s\n" % ( - arc['arc_sizing']['dnode_limit']['per'], - arc['arc_sizing']['dnode_limit']['num'], - ) - ) - sys.stdout.write("\tDnode Size:\t\t\t%s\t%s\n" % ( - arc['arc_sizing']['dnode_size']['per'], - arc['arc_sizing']['dnode_size']['num'], - ) - ) - - sys.stdout.write("\n") - - # ARC Hash Breakdown - sys.stdout.write("ARC Hash Breakdown:\n") - sys.stdout.write("\tElements Max:\t\t\t\t%s\n" % - arc['arc_hash_break']['elements_max']) - sys.stdout.write("\tElements Current:\t\t%s\t%s\n" % ( - arc['arc_hash_break']['elements_current']['per'], - arc['arc_hash_break']['elements_current']['num'], - ) - ) - sys.stdout.write("\tCollisions:\t\t\t\t%s\n" % - arc['arc_hash_break']['collisions']) - sys.stdout.write("\tChain Max:\t\t\t\t%s\n" % - arc['arc_hash_break']['chain_max']) - sys.stdout.write("\tChains:\t\t\t\t\t%s\n" % - arc['arc_hash_break']['chains']) - - -def get_arc_efficiency(Kstat): - """Collect information on the efficiency of the ARC""" - - output = {} - - arc_hits = Kstat["kstat.zfs.misc.arcstats.hits"] - arc_misses = 
Kstat["kstat.zfs.misc.arcstats.misses"] - demand_data_hits = Kstat["kstat.zfs.misc.arcstats.demand_data_hits"] - demand_data_misses = Kstat["kstat.zfs.misc.arcstats.demand_data_misses"] - demand_metadata_hits = Kstat[ - "kstat.zfs.misc.arcstats.demand_metadata_hits" - ] - demand_metadata_misses = Kstat[ - "kstat.zfs.misc.arcstats.demand_metadata_misses" - ] - mfu_ghost_hits = Kstat["kstat.zfs.misc.arcstats.mfu_ghost_hits"] - mfu_hits = Kstat["kstat.zfs.misc.arcstats.mfu_hits"] - mru_ghost_hits = Kstat["kstat.zfs.misc.arcstats.mru_ghost_hits"] - mru_hits = Kstat["kstat.zfs.misc.arcstats.mru_hits"] - prefetch_data_hits = Kstat["kstat.zfs.misc.arcstats.prefetch_data_hits"] - prefetch_data_misses = Kstat[ - "kstat.zfs.misc.arcstats.prefetch_data_misses" - ] - prefetch_metadata_hits = Kstat[ - "kstat.zfs.misc.arcstats.prefetch_metadata_hits" - ] - prefetch_metadata_misses = Kstat[ - "kstat.zfs.misc.arcstats.prefetch_metadata_misses" - ] - - anon_hits = arc_hits - ( - mfu_hits + mru_hits + mfu_ghost_hits + mru_ghost_hits - ) - arc_accesses_total = (arc_hits + arc_misses) - demand_data_total = (demand_data_hits + demand_data_misses) - prefetch_data_total = (prefetch_data_hits + prefetch_data_misses) - real_hits = (mfu_hits + mru_hits) - - output["total_accesses"] = fHits(arc_accesses_total) - output["cache_hit_ratio"] = { - 'per': fPerc(arc_hits, arc_accesses_total), - 'num': fHits(arc_hits), - } - output["cache_miss_ratio"] = { - 'per': fPerc(arc_misses, arc_accesses_total), - 'num': fHits(arc_misses), - } - output["actual_hit_ratio"] = { - 'per': fPerc(real_hits, arc_accesses_total), - 'num': fHits(real_hits), - } - output["data_demand_efficiency"] = { - 'per': fPerc(demand_data_hits, demand_data_total), - 'num': fHits(demand_data_total), - } - - if prefetch_data_total > 0: - output["data_prefetch_efficiency"] = { - 'per': fPerc(prefetch_data_hits, prefetch_data_total), - 'num': fHits(prefetch_data_total), - } - - if anon_hits > 0: - output["cache_hits_by_cache_list"] = {} - output["cache_hits_by_cache_list"]["anonymously_used"] = { - 'per': fPerc(anon_hits, arc_hits), - 'num': fHits(anon_hits), - } - - output["most_recently_used"] = { - 'per': fPerc(mru_hits, arc_hits), - 'num': fHits(mru_hits), - } - output["most_frequently_used"] = { - 'per': fPerc(mfu_hits, arc_hits), - 'num': fHits(mfu_hits), - } - output["most_recently_used_ghost"] = { - 'per': fPerc(mru_ghost_hits, arc_hits), - 'num': fHits(mru_ghost_hits), - } - output["most_frequently_used_ghost"] = { - 'per': fPerc(mfu_ghost_hits, arc_hits), - 'num': fHits(mfu_ghost_hits), - } - - output["cache_hits_by_data_type"] = {} - output["cache_hits_by_data_type"]["demand_data"] = { - 'per': fPerc(demand_data_hits, arc_hits), - 'num': fHits(demand_data_hits), - } - output["cache_hits_by_data_type"]["prefetch_data"] = { - 'per': fPerc(prefetch_data_hits, arc_hits), - 'num': fHits(prefetch_data_hits), - } - output["cache_hits_by_data_type"]["demand_metadata"] = { - 'per': fPerc(demand_metadata_hits, arc_hits), - 'num': fHits(demand_metadata_hits), - } - output["cache_hits_by_data_type"]["prefetch_metadata"] = { - 'per': fPerc(prefetch_metadata_hits, arc_hits), - 'num': fHits(prefetch_metadata_hits), - } - - output["cache_misses_by_data_type"] = {} - output["cache_misses_by_data_type"]["demand_data"] = { - 'per': fPerc(demand_data_misses, arc_misses), - 'num': fHits(demand_data_misses), - } - output["cache_misses_by_data_type"]["prefetch_data"] = { - 'per': fPerc(prefetch_data_misses, arc_misses), - 'num': fHits(prefetch_data_misses), - } - 
output["cache_misses_by_data_type"]["demand_metadata"] = { - 'per': fPerc(demand_metadata_misses, arc_misses), - 'num': fHits(demand_metadata_misses), - } - output["cache_misses_by_data_type"]["prefetch_metadata"] = { - 'per': fPerc(prefetch_metadata_misses, arc_misses), - 'num': fHits(prefetch_metadata_misses), - } - - return output - - -def _arc_efficiency(Kstat): - """Print information on the efficiency of the ARC""" - - arc = get_arc_efficiency(Kstat) - - sys.stdout.write("ARC Total accesses:\t\t\t\t\t%s\n" % - arc['total_accesses']) - sys.stdout.write("\tCache Hit Ratio:\t\t%s\t%s\n" % ( - arc['cache_hit_ratio']['per'], - arc['cache_hit_ratio']['num'], - ) - ) - sys.stdout.write("\tCache Miss Ratio:\t\t%s\t%s\n" % ( - arc['cache_miss_ratio']['per'], - arc['cache_miss_ratio']['num'], - ) - ) - - sys.stdout.write("\tActual Hit Ratio:\t\t%s\t%s\n" % ( - arc['actual_hit_ratio']['per'], - arc['actual_hit_ratio']['num'], - ) - ) - - sys.stdout.write("\n") - sys.stdout.write("\tData Demand Efficiency:\t\t%s\t%s\n" % ( - arc['data_demand_efficiency']['per'], - arc['data_demand_efficiency']['num'], - ) - ) - - if 'data_prefetch_efficiency' in arc: - sys.stdout.write("\tData Prefetch Efficiency:\t%s\t%s\n" % ( - arc['data_prefetch_efficiency']['per'], - arc['data_prefetch_efficiency']['num'], - ) - ) - sys.stdout.write("\n") - - sys.stdout.write("\tCACHE HITS BY CACHE LIST:\n") - if 'cache_hits_by_cache_list' in arc: - sys.stdout.write("\t Anonymously Used:\t\t%s\t%s\n" % ( - arc['cache_hits_by_cache_list']['anonymously_used']['per'], - arc['cache_hits_by_cache_list']['anonymously_used']['num'], - ) - ) - sys.stdout.write("\t Most Recently Used:\t\t%s\t%s\n" % ( - arc['most_recently_used']['per'], - arc['most_recently_used']['num'], - ) - ) - sys.stdout.write("\t Most Frequently Used:\t\t%s\t%s\n" % ( - arc['most_frequently_used']['per'], - arc['most_frequently_used']['num'], - ) - ) - sys.stdout.write("\t Most Recently Used Ghost:\t%s\t%s\n" % ( - arc['most_recently_used_ghost']['per'], - arc['most_recently_used_ghost']['num'], - ) - ) - sys.stdout.write("\t Most Frequently Used Ghost:\t%s\t%s\n" % ( - arc['most_frequently_used_ghost']['per'], - arc['most_frequently_used_ghost']['num'], - ) - ) - - sys.stdout.write("\n\tCACHE HITS BY DATA TYPE:\n") - sys.stdout.write("\t Demand Data:\t\t\t%s\t%s\n" % ( - arc["cache_hits_by_data_type"]['demand_data']['per'], - arc["cache_hits_by_data_type"]['demand_data']['num'], - ) - ) - sys.stdout.write("\t Prefetch Data:\t\t%s\t%s\n" % ( - arc["cache_hits_by_data_type"]['prefetch_data']['per'], - arc["cache_hits_by_data_type"]['prefetch_data']['num'], - ) - ) - sys.stdout.write("\t Demand Metadata:\t\t%s\t%s\n" % ( - arc["cache_hits_by_data_type"]['demand_metadata']['per'], - arc["cache_hits_by_data_type"]['demand_metadata']['num'], - ) - ) - sys.stdout.write("\t Prefetch Metadata:\t\t%s\t%s\n" % ( - arc["cache_hits_by_data_type"]['prefetch_metadata']['per'], - arc["cache_hits_by_data_type"]['prefetch_metadata']['num'], - ) - ) - - sys.stdout.write("\n\tCACHE MISSES BY DATA TYPE:\n") - sys.stdout.write("\t Demand Data:\t\t\t%s\t%s\n" % ( - arc["cache_misses_by_data_type"]['demand_data']['per'], - arc["cache_misses_by_data_type"]['demand_data']['num'], - ) - ) - sys.stdout.write("\t Prefetch Data:\t\t%s\t%s\n" % ( - arc["cache_misses_by_data_type"]['prefetch_data']['per'], - arc["cache_misses_by_data_type"]['prefetch_data']['num'], - ) - ) - sys.stdout.write("\t Demand Metadata:\t\t%s\t%s\n" % ( - arc["cache_misses_by_data_type"]['demand_metadata']['per'], - 
arc["cache_misses_by_data_type"]['demand_metadata']['num'], - ) - ) - sys.stdout.write("\t Prefetch Metadata:\t\t%s\t%s\n" % ( - arc["cache_misses_by_data_type"]['prefetch_metadata']['per'], - arc["cache_misses_by_data_type"]['prefetch_metadata']['num'], - ) - ) - - -def get_l2arc_summary(Kstat): - """Collection information on the L2ARC""" - - output = {} - - l2_abort_lowmem = Kstat["kstat.zfs.misc.arcstats.l2_abort_lowmem"] - l2_cksum_bad = Kstat["kstat.zfs.misc.arcstats.l2_cksum_bad"] - l2_evict_lock_retry = Kstat["kstat.zfs.misc.arcstats.l2_evict_lock_retry"] - l2_evict_reading = Kstat["kstat.zfs.misc.arcstats.l2_evict_reading"] - l2_feeds = Kstat["kstat.zfs.misc.arcstats.l2_feeds"] - l2_free_on_write = Kstat["kstat.zfs.misc.arcstats.l2_free_on_write"] - l2_hdr_size = Kstat["kstat.zfs.misc.arcstats.l2_hdr_size"] - l2_hits = Kstat["kstat.zfs.misc.arcstats.l2_hits"] - l2_io_error = Kstat["kstat.zfs.misc.arcstats.l2_io_error"] - l2_misses = Kstat["kstat.zfs.misc.arcstats.l2_misses"] - l2_rw_clash = Kstat["kstat.zfs.misc.arcstats.l2_rw_clash"] - l2_size = Kstat["kstat.zfs.misc.arcstats.l2_size"] - l2_asize = Kstat["kstat.zfs.misc.arcstats.l2_asize"] - l2_writes_done = Kstat["kstat.zfs.misc.arcstats.l2_writes_done"] - l2_writes_error = Kstat["kstat.zfs.misc.arcstats.l2_writes_error"] - l2_writes_sent = Kstat["kstat.zfs.misc.arcstats.l2_writes_sent"] - l2_mfu_asize = Kstat["kstat.zfs.misc.arcstats.l2_mfu_asize"] - l2_mru_asize = Kstat["kstat.zfs.misc.arcstats.l2_mru_asize"] - l2_prefetch_asize = Kstat["kstat.zfs.misc.arcstats.l2_prefetch_asize"] - l2_bufc_data_asize = Kstat["kstat.zfs.misc.arcstats.l2_bufc_data_asize"] - l2_bufc_metadata_asize = Kstat["kstat.zfs.misc.arcstats.l2_bufc_metadata_asize"] - - l2_access_total = (l2_hits + l2_misses) - output['l2_health_count'] = (l2_writes_error + l2_cksum_bad + l2_io_error) - - output['l2_access_total'] = l2_access_total - output['l2_size'] = l2_size - output['l2_asize'] = l2_asize - - if l2_size > 0 and l2_access_total > 0: - - if output['l2_health_count'] > 0: - output["health"] = "DEGRADED" - else: - output["health"] = "HEALTHY" - - output["low_memory_aborts"] = fHits(l2_abort_lowmem) - output["free_on_write"] = fHits(l2_free_on_write) - output["rw_clashes"] = fHits(l2_rw_clash) - output["bad_checksums"] = fHits(l2_cksum_bad) - output["io_errors"] = fHits(l2_io_error) - - output["l2_arc_size"] = {} - output["l2_arc_size"]["adaptive"] = fBytes(l2_size) - output["l2_arc_size"]["actual"] = { - 'per': fPerc(l2_asize, l2_size), - 'num': fBytes(l2_asize) - } - output["l2_arc_size"]["head_size"] = { - 'per': fPerc(l2_hdr_size, l2_size), - 'num': fBytes(l2_hdr_size), - } - output["l2_arc_size"]["mfu_asize"] = { - 'per': fPerc(l2_mfu_asize, l2_asize), - 'num': fBytes(l2_mfu_asize), - } - output["l2_arc_size"]["mru_asize"] = { - 'per': fPerc(l2_mru_asize, l2_asize), - 'num': fBytes(l2_mru_asize), - } - output["l2_arc_size"]["prefetch_asize"] = { - 'per': fPerc(l2_prefetch_asize, l2_asize), - 'num': fBytes(l2_prefetch_asize), - } - output["l2_arc_size"]["bufc_data_asize"] = { - 'per': fPerc(l2_bufc_data_asize, l2_asize), - 'num': fBytes(l2_bufc_data_asize), - } - output["l2_arc_size"]["bufc_metadata_asize"] = { - 'per': fPerc(l2_bufc_metadata_asize, l2_asize), - 'num': fBytes(l2_bufc_metadata_asize), - } - - output["l2_arc_evicts"] = {} - output["l2_arc_evicts"]['lock_retries'] = fHits(l2_evict_lock_retry) - output["l2_arc_evicts"]['reading'] = fHits(l2_evict_reading) - - output['l2_arc_breakdown'] = {} - output['l2_arc_breakdown']['value'] = 
fHits(l2_access_total) - output['l2_arc_breakdown']['hit_ratio'] = { - 'per': fPerc(l2_hits, l2_access_total), - 'num': fHits(l2_hits), - } - output['l2_arc_breakdown']['miss_ratio'] = { - 'per': fPerc(l2_misses, l2_access_total), - 'num': fHits(l2_misses), - } - output['l2_arc_breakdown']['feeds'] = fHits(l2_feeds) - - output['l2_arc_buffer'] = {} - - output['l2_arc_writes'] = {} - output['l2_writes_done'] = l2_writes_done - output['l2_writes_sent'] = l2_writes_sent - if l2_writes_done != l2_writes_sent: - output['l2_arc_writes']['writes_sent'] = { - 'value': "FAULTED", - 'num': fHits(l2_writes_sent), - } - output['l2_arc_writes']['done_ratio'] = { - 'per': fPerc(l2_writes_done, l2_writes_sent), - 'num': fHits(l2_writes_done), - } - output['l2_arc_writes']['error_ratio'] = { - 'per': fPerc(l2_writes_error, l2_writes_sent), - 'num': fHits(l2_writes_error), - } - else: - output['l2_arc_writes']['writes_sent'] = { - 'per': fPerc(100), - 'num': fHits(l2_writes_sent), - } - - return output - - -def _l2arc_summary(Kstat): - """Print information on the L2ARC""" - - arc = get_l2arc_summary(Kstat) - - if arc['l2_size'] > 0 and arc['l2_access_total'] > 0: - sys.stdout.write("L2 ARC Summary: ") - if arc['l2_health_count'] > 0: - sys.stdout.write("(DEGRADED)\n") - else: - sys.stdout.write("(HEALTHY)\n") - sys.stdout.write("\tLow Memory Aborts:\t\t\t%s\n" % - arc['low_memory_aborts']) - sys.stdout.write("\tFree on Write:\t\t\t\t%s\n" % arc['free_on_write']) - sys.stdout.write("\tR/W Clashes:\t\t\t\t%s\n" % arc['rw_clashes']) - sys.stdout.write("\tBad Checksums:\t\t\t\t%s\n" % arc['bad_checksums']) - sys.stdout.write("\tIO Errors:\t\t\t\t%s\n" % arc['io_errors']) - sys.stdout.write("\n") - - sys.stdout.write("L2 ARC Size: (Adaptive)\t\t\t\t%s\n" % - arc["l2_arc_size"]["adaptive"]) - sys.stdout.write("\tCompressed:\t\t\t%s\t%s\n" % ( - arc["l2_arc_size"]["actual"]["per"], - arc["l2_arc_size"]["actual"]["num"], - ) - ) - sys.stdout.write("\tHeader Size:\t\t\t%s\t%s\n" % ( - arc["l2_arc_size"]["head_size"]["per"], - arc["l2_arc_size"]["head_size"]["num"], - ) - ) - sys.stdout.write("\tMFU Alloc. Size:\t\t%s\t%s\n" % ( - arc["l2_arc_size"]["mfu_asize"]["per"], - arc["l2_arc_size"]["mfu_asize"]["num"], - ) - ) - sys.stdout.write("\tMRU Alloc. Size:\t\t%s\t%s\n" % ( - arc["l2_arc_size"]["mru_asize"]["per"], - arc["l2_arc_size"]["mru_asize"]["num"], - ) - ) - sys.stdout.write("\tPrefetch Alloc. Size:\t\t%s\t%s\n" % ( - arc["l2_arc_size"]["prefetch_asize"]["per"], - arc["l2_arc_size"]["prefetch_asize"]["num"], - ) - ) - sys.stdout.write("\tData (buf content) Alloc. 
Size:\t%s\t%s\n" % ( - arc["l2_arc_size"]["bufc_data_asize"]["per"], - arc["l2_arc_size"]["bufc_data_asize"]["num"], - ) - ) - sys.stdout.write("\tMetadata (buf content) Size:\t%s\t%s\n" % ( - arc["l2_arc_size"]["bufc_metadata_asize"]["per"], - arc["l2_arc_size"]["bufc_metadata_asize"]["num"], - ) - ) - sys.stdout.write("\n") - - if arc["l2_arc_evicts"]['lock_retries'] != '0' or \ - arc["l2_arc_evicts"]["reading"] != '0': - sys.stdout.write("L2 ARC Evictions:\n") - sys.stdout.write("\tLock Retries:\t\t\t\t%s\n" % - arc["l2_arc_evicts"]['lock_retries']) - sys.stdout.write("\tUpon Reading:\t\t\t\t%s\n" % - arc["l2_arc_evicts"]["reading"]) - sys.stdout.write("\n") - - sys.stdout.write("L2 ARC Breakdown:\t\t\t\t%s\n" % - arc['l2_arc_breakdown']['value']) - sys.stdout.write("\tHit Ratio:\t\t\t%s\t%s\n" % ( - arc['l2_arc_breakdown']['hit_ratio']['per'], - arc['l2_arc_breakdown']['hit_ratio']['num'], - ) - ) - - sys.stdout.write("\tMiss Ratio:\t\t\t%s\t%s\n" % ( - arc['l2_arc_breakdown']['miss_ratio']['per'], - arc['l2_arc_breakdown']['miss_ratio']['num'], - ) - ) - - sys.stdout.write("\tFeeds:\t\t\t\t\t%s\n" % - arc['l2_arc_breakdown']['feeds']) - sys.stdout.write("\n") - - sys.stdout.write("L2 ARC Writes:\n") - if arc['l2_writes_done'] != arc['l2_writes_sent']: - sys.stdout.write("\tWrites Sent: (%s)\t\t\t\t%s\n" % ( - arc['l2_arc_writes']['writes_sent']['value'], - arc['l2_arc_writes']['writes_sent']['num'], - ) - ) - sys.stdout.write("\t Done Ratio:\t\t\t%s\t%s\n" % ( - arc['l2_arc_writes']['done_ratio']['per'], - arc['l2_arc_writes']['done_ratio']['num'], - ) - ) - sys.stdout.write("\t Error Ratio:\t\t\t%s\t%s\n" % ( - arc['l2_arc_writes']['error_ratio']['per'], - arc['l2_arc_writes']['error_ratio']['num'], - ) - ) - else: - sys.stdout.write("\tWrites Sent:\t\t\t%s\t%s\n" % ( - arc['l2_arc_writes']['writes_sent']['per'], - arc['l2_arc_writes']['writes_sent']['num'], - ) - ) - - -def get_dmu_summary(Kstat): - """Collect information on the DMU""" - - output = {} - - zfetch_hits = Kstat["kstat.zfs.misc.zfetchstats.hits"] - zfetch_misses = Kstat["kstat.zfs.misc.zfetchstats.misses"] - - zfetch_access_total = (zfetch_hits + zfetch_misses) - output['zfetch_access_total'] = zfetch_access_total - - if zfetch_access_total > 0: - output['dmu'] = {} - output['dmu']['efficiency'] = {} - output['dmu']['efficiency']['value'] = fHits(zfetch_access_total) - output['dmu']['efficiency']['hit_ratio'] = { - 'per': fPerc(zfetch_hits, zfetch_access_total), - 'num': fHits(zfetch_hits), - } - output['dmu']['efficiency']['miss_ratio'] = { - 'per': fPerc(zfetch_misses, zfetch_access_total), - 'num': fHits(zfetch_misses), - } - - return output - - -def _dmu_summary(Kstat): - """Print information on the DMU""" - - arc = get_dmu_summary(Kstat) - - if arc['zfetch_access_total'] > 0: - sys.stdout.write("DMU Prefetch Efficiency:\t\t\t\t\t%s\n" % - arc['dmu']['efficiency']['value']) - sys.stdout.write("\tHit Ratio:\t\t\t%s\t%s\n" % ( - arc['dmu']['efficiency']['hit_ratio']['per'], - arc['dmu']['efficiency']['hit_ratio']['num'], - ) - ) - sys.stdout.write("\tMiss Ratio:\t\t\t%s\t%s\n" % ( - arc['dmu']['efficiency']['miss_ratio']['per'], - arc['dmu']['efficiency']['miss_ratio']['num'], - ) - ) - - sys.stdout.write("\n") - - -def get_vdev_summary(Kstat): - """Collect information on the VDEVs""" - - output = {} - - vdev_cache_delegations = \ - Kstat["kstat.zfs.misc.vdev_cache_stats.delegations"] - vdev_cache_misses = Kstat["kstat.zfs.misc.vdev_cache_stats.misses"] - vdev_cache_hits = Kstat["kstat.zfs.misc.vdev_cache_stats.hits"] 
- vdev_cache_total = (vdev_cache_misses + vdev_cache_hits + - vdev_cache_delegations) - - output['vdev_cache_total'] = vdev_cache_total - - if vdev_cache_total > 0: - output['summary'] = fHits(vdev_cache_total) - output['hit_ratio'] = { - 'per': fPerc(vdev_cache_hits, vdev_cache_total), - 'num': fHits(vdev_cache_hits), - } - output['miss_ratio'] = { - 'per': fPerc(vdev_cache_misses, vdev_cache_total), - 'num': fHits(vdev_cache_misses), - } - output['delegations'] = { - 'per': fPerc(vdev_cache_delegations, vdev_cache_total), - 'num': fHits(vdev_cache_delegations), - } - - return output - - -def _vdev_summary(Kstat): - """Print information on the VDEVs""" - - arc = get_vdev_summary(Kstat) - - if arc['vdev_cache_total'] > 0: - sys.stdout.write("VDEV Cache Summary:\t\t\t\t%s\n" % arc['summary']) - sys.stdout.write("\tHit Ratio:\t\t\t%s\t%s\n" % ( - arc['hit_ratio']['per'], - arc['hit_ratio']['num'], - )) - sys.stdout.write("\tMiss Ratio:\t\t\t%s\t%s\n" % ( - arc['miss_ratio']['per'], - arc['miss_ratio']['num'], - )) - sys.stdout.write("\tDelegations:\t\t\t%s\t%s\n" % ( - arc['delegations']['per'], - arc['delegations']['num'], - )) - - -def _tunable_summary(Kstat): - """Print information on tunables, including descriptions if requested""" - - global show_tunable_descriptions - global alternate_tunable_layout - - tunables = load_tunables() - descriptions = {} - - if show_tunable_descriptions: - - command = ["/sbin/modinfo", "zfs", "-0"] - - try: - p = Popen(command, stdin=PIPE, stdout=PIPE, - stderr=PIPE, shell=False, close_fds=True) - p.wait() - - # By default, Python 2 returns a string as the first element of the - # tuple from p.communicate(), while Python 3 returns bytes which - # must be decoded first. The better way to do this would be with - # subprocess.run() or at least .check_output(), but this fails on - # CentOS 6 because of its old version of Python 2 - desc = bytes.decode(p.communicate()[0]) - description_list = desc.strip().split('\0') - - if p.returncode == 0: - for tunable in description_list: - if tunable[0:5] == 'parm:': - tunable = tunable[5:].strip() - name, description = tunable.split(':', 1) - if not description: - description = "Description unavailable" - descriptions[name] = description - else: - sys.stderr.write("%s: '%s' exited with code %i\n" % - (sys.argv[0], command[0], p.returncode)) - sys.stderr.write("Tunable descriptions will be disabled.\n") - except OSError as e: - sys.stderr.write("%s: Cannot run '%s': %s\n" % - (sys.argv[0], command[0], e.strerror)) - sys.stderr.write("Tunable descriptions will be disabled.\n") - - sys.stdout.write("ZFS Tunables:\n") - - if alternate_tunable_layout: - fmt = "\t%s=%s\n" - else: - fmt = "\t%-50s%s\n" - - for name in sorted(tunables.keys()): - if show_tunable_descriptions and name in descriptions: - sys.stdout.write("\t# %s\n" % descriptions[name]) - - sys.stdout.write(fmt % (name, tunables[name])) - - -unSub = [ - _arc_summary, - _arc_efficiency, - _l2arc_summary, - _dmu_summary, - _vdev_summary, - _tunable_summary -] - - -def zfs_header(): - """Print title string with date""" - - daydate = time.strftime('%a %b %d %H:%M:%S %Y') - - sys.stdout.write('\n'+'-'*72+'\n') - sys.stdout.write('ZFS Subsystem Report\t\t\t\t%s' % daydate) - sys.stdout.write('\n') - - -def usage(): - """Print usage information""" - - sys.stdout.write("Usage: arc_summary [-h] [-a] [-d] [-p PAGE]\n\n") - sys.stdout.write("\t -h, --help : " - "Print this help message and exit\n") - sys.stdout.write("\t -a, --alternate : " - "Show an alternate sysctl 
layout\n")
-    sys.stdout.write("\t -d, --description : "
-                     "Show the sysctl descriptions\n")
-    sys.stdout.write("\t -p PAGE, --page=PAGE : "
-                     "Select a single output page to display,\n")
-    sys.stdout.write("\t "
-                     "should be an integer between 1 and " +
-                     str(len(unSub)) + "\n\n")
-    sys.stdout.write("Examples:\n")
-    sys.stdout.write("\tarc_summary -a\n")
-    sys.stdout.write("\tarc_summary -p 4\n")
-    sys.stdout.write("\tarc_summary -ad\n")
-    sys.stdout.write("\tarc_summary --page=2\n")
-
-
-def main():
-    """Main function"""
-
-    global show_tunable_descriptions
-    global alternate_tunable_layout
-
-    try:
-        try:
-            opts, args = getopt.getopt(
-                sys.argv[1:],
-                "adp:h", ["alternate", "description", "page=", "help"]
-            )
-        except getopt.error as e:
-            sys.stderr.write("Error: %s\n" % e.msg)
-            usage()
-            sys.exit(1)
-
-        args = {}
-        for opt, arg in opts:
-            if opt in ('-a', '--alternate'):
-                args['a'] = True
-            if opt in ('-d', '--description'):
-                args['d'] = True
-            if opt in ('-p', '--page'):
-                args['p'] = arg
-            if opt in ('-h', '--help'):
-                usage()
-                sys.exit(0)
-
-        Kstat = get_Kstat()
-
-        alternate_tunable_layout = 'a' in args
-        show_tunable_descriptions = 'd' in args
-
-        pages = []
-
-        if 'p' in args:
-            try:
-                pages.append(unSub[int(args['p']) - 1])
-            except IndexError:
-                sys.stderr.write('the argument to -p must be between 1 and ' +
-                                 str(len(unSub)) + '\n')
-                sys.exit(1)
-        else:
-            pages = unSub
-
-        zfs_header()
-        for page in pages:
-            page(Kstat)
-            sys.stdout.write("\n")
-    except IOError as ex:
-        if (ex.errno == errno.EPIPE):
-            sys.exit(0)
-        raise
-    except KeyboardInterrupt:
-        sys.exit(0)
-
-
-if __name__ == '__main__':
-    main()
diff --git a/cmd/arc_summary/arc_summary3 b/cmd/arc_summary/arc_summary3
index 301c485b34..9d0c2d30dd 100755
--- a/cmd/arc_summary/arc_summary3
+++ b/cmd/arc_summary/arc_summary3
@@ -191,21 +191,13 @@ elif sys.platform.startswith('linux'):
         # there, so we fall back on modinfo
         command = ["/sbin/modinfo", request, "-0"]
 
-        # The recommended way to do this is with subprocess.run(). However,
-        # some installed versions of Python are < 3.5, so we offer them
-        # the option of doing it the old way (for now)
         info = ''
 
         try:
-            if 'run' in dir(subprocess):
-                info = subprocess.run(command, stdout=subprocess.PIPE,
-                                      universal_newlines=True)
-                raw_output = info.stdout.split('\0')
-            else:
-                info = subprocess.check_output(command,
-                                               universal_newlines=True)
-                raw_output = info.split('\0')
+            info = subprocess.run(command, stdout=subprocess.PIPE,
+                                  check=True, universal_newlines=True)
+            raw_output = info.stdout.split('\0')
 
         except subprocess.CalledProcessError:
             print("Error: Descriptions not available",
diff --git a/cmd/arcstat/arcstat.in b/cmd/arcstat/arcstat.in
index 425e52d1f5..0128fd8175 100755
--- a/cmd/arcstat/arcstat.in
+++ b/cmd/arcstat/arcstat.in
@@ -47,7 +47,7 @@
 # @hdr is the array of fields that needs to be printed, so we
 # just iterate over this array and print the values using our pretty printer.
 #
-# This script must remain compatible with Python 2.6+ and Python 3.4+.
+# This script must remain compatible with Python 3.6+.
 #
 
 import sys
diff --git a/cmd/dbufstat/dbufstat.in b/cmd/dbufstat/dbufstat.in
index 82250353f5..b716a0c974 100755
--- a/cmd/dbufstat/dbufstat.in
+++ b/cmd/dbufstat/dbufstat.in
@@ -27,7 +27,7 @@
 # Copyright (C) 2013 Lawrence Livermore National Security, LLC.
 # Produced at Lawrence Livermore National Laboratory (cf, DISCLAIMER).
 #
-# This script must remain compatible with Python 2.6+ and Python 3.4+.
+# This script must remain compatible with Python 3.6+.
 #
 
 import sys
diff --git a/config/always-python.m4 b/config/always-python.m4
index 5f47df424c..5a2008124f 100644
--- a/config/always-python.m4
+++ b/config/always-python.m4
@@ -1,7 +1,6 @@
 dnl #
 dnl # The majority of the python scripts are written to be compatible
-dnl # with Python 2.6 and Python 3.4. Therefore, they may be installed
-dnl # and used with either interpreter. This option is intended to
+dnl # with Python 3.6. This option is intended to
 dnl # to provide a method to specify the default system version, and
 dnl # set the PYTHON environment variable accordingly.
 dnl #
@@ -13,9 +12,7 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYTHON], [
                 [with_python=check])
 
         AS_CASE([$with_python],
-                [check], [AC_CHECK_PROGS([PYTHON], [python3 python2], [:])],
-                [2*], [PYTHON="python${with_python}"],
-                [*python2*], [PYTHON="${with_python}"],
+                [check], [AC_CHECK_PROGS([PYTHON], [python3], [:])],
                 [3*], [PYTHON="python${with_python}"],
                 [*python3*], [PYTHON="${with_python}"],
                 [no], [PYTHON=":"],
@@ -23,8 +20,7 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYTHON], [
         )
 
         dnl #
-        dnl # Minimum supported Python versions for utilities:
-        dnl # Python 2.6 or Python 3.4
+        dnl # Minimum supported Python versions for utilities: Python 3.6
         dnl #
         AM_PATH_PYTHON([], [], [:])
         AS_IF([test -z "$PYTHON_VERSION"], [
@@ -33,25 +29,16 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYTHON], [
         PYTHON_MINOR=${PYTHON_VERSION#*\.}
 
         AS_CASE([$PYTHON_VERSION],
-                [2.*], [
-                        AS_IF([test $PYTHON_MINOR -lt 6],
-                                [AC_MSG_ERROR("Python >= 2.6 is required")])
-                ],
                 [3.*], [
-                        AS_IF([test $PYTHON_MINOR -lt 4],
-                                [AC_MSG_ERROR("Python >= 3.4 is required")])
+                        AS_IF([test $PYTHON_MINOR -lt 6],
+                                [AC_MSG_ERROR("Python >= 3.6 is required")])
                 ],
                 [:|2|3], [],
                 [PYTHON_VERSION=3]
         )
 
         AM_CONDITIONAL([USING_PYTHON], [test "$PYTHON" != :])
-        AM_CONDITIONAL([USING_PYTHON_2], [test "x${PYTHON_VERSION%%\.*}" = x2])
-        AM_CONDITIONAL([USING_PYTHON_3], [test "x${PYTHON_VERSION%%\.*}" = x3])
-
-        AM_COND_IF([USING_PYTHON_2],
-                [AC_SUBST([PYTHON_SHEBANG], [python2])],
-                [AC_SUBST([PYTHON_SHEBANG], [python3])])
+        AC_SUBST([PYTHON_SHEBANG], [python3])
 
         dnl #
         dnl # Request that packages be built for a specific Python version.
diff --git a/config/always-pyzfs.m4 b/config/always-pyzfs.m4
index efea49f5f0..9b123b1b2d 100644
--- a/config/always-pyzfs.m4
+++ b/config/always-pyzfs.m4
@@ -18,7 +18,7 @@ AC_DEFUN([ZFS_AC_PYTHON_MODULE], [
 ])
 
 dnl #
-dnl # Determines if pyzfs can be built, requires Python 2.7 or later.
+dnl # Determines if pyzfs can be built, requires Python 3.6 or later.
dnl # AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYZFS], [ AC_ARG_ENABLE([pyzfs], @@ -72,12 +72,11 @@ AC_DEFUN([ZFS_AC_CONFIG_ALWAYS_PYZFS], [ ]) dnl # - dnl # Require python-devel libraries + dnl # Require python3-devel libraries dnl # AS_IF([test "x$enable_pyzfs" = xcheck -o "x$enable_pyzfs" = xyes], [ AS_CASE([$PYTHON_VERSION], - [3.*], [PYTHON_REQUIRED_VERSION=">= '3.4.0'"], - [2.*], [PYTHON_REQUIRED_VERSION=">= '2.7.0'"], + [3.*], [PYTHON_REQUIRED_VERSION=">= '3.6.0'"], [AC_MSG_ERROR("Python $PYTHON_VERSION unknown")] ) diff --git a/contrib/pyzfs/setup.py.in b/contrib/pyzfs/setup.py.in index bd8ffc728f..934b3189eb 100644 --- a/contrib/pyzfs/setup.py.in +++ b/contrib/pyzfs/setup.py.in @@ -29,13 +29,12 @@ setup( "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Topic :: System :: Filesystems", "Topic :: Software Development :: Libraries", ], @@ -53,7 +52,7 @@ setup( setup_requires=[ "cffi", ], - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,<4', + python_requires='>=3.6,<4', zip_safe=False, test_suite="libzfs_core.test", ) diff --git a/rpm/generic/zfs.spec.in b/rpm/generic/zfs.spec.in index 0fafd192f1..3dce92acbb 100644 --- a/rpm/generic/zfs.spec.in +++ b/rpm/generic/zfs.spec.in @@ -3,7 +3,7 @@ # Set the default udev directory based on distribution. %if %{undefined _udevdir} -%if 0%{?fedora} >= 17 || 0%{?rhel} >= 7 || 0%{?centos} >= 7 || 0%{?openEuler} +%if 0%{?fedora}%{?rhel}%{?centos}%{?openEuler} %global _udevdir %{_prefix}/lib/udev %else %global _udevdir /lib/udev @@ -12,7 +12,7 @@ # Set the default udevrule directory based on distribution. %if %{undefined _udevruledir} -%if 0%{?fedora} >= 17 || 0%{?rhel} >= 7 || 0%{?centos} >= 7 || 0%{?openEuler} +%if 0%{?fedora}%{?rhel}%{?centos}%{?openEuler} %global _udevruledir %{_prefix}/lib/udev/rules.d %else %global _udevruledir /lib/udev/rules.d @@ -21,7 +21,7 @@ # Set the default dracut directory based on distribution. %if %{undefined _dracutdir} -%if 0%{?fedora} >= 17 || 0%{?rhel} >= 7 || 0%{?centos} >= 7 || 0%{?openEuler} +%if 0%{?fedora}%{?rhel}%{?centos}%{?openEuler} %global _dracutdir %{_prefix}/lib/dracut %else %global _dracutdir %{_prefix}/share/dracut @@ -57,64 +57,28 @@ %bcond_with asan %bcond_with systemd %bcond_with pam +%bcond_without pyzfs # Generic enable switch for systemd %if %{with systemd} %define _systemd 1 %endif -# RHEL >= 7 comes with systemd -%if 0%{?rhel} >= 7 +# Distros below support systemd +%if 0%{?rhel}%{?fedora}%{?centos}%{?suse_version} %define _systemd 1 %endif -# Fedora >= 15 comes with systemd, but only >= 18 has -# the proper macros -%if 0%{?fedora} >= 18 -%define _systemd 1 -%endif - -# opensuse >= 12.1 comes with systemd, but only >= 13.1 -# has the proper macros -%if 0%{?suse_version} >= 1310 -%define _systemd 1 -%endif - -# openEuler comes with systemd -%if 0%{?openEuler} -%define _systemd 1 -%endif - -# When not specified default to distribution provided version. This -# is normally Python 3, but for RHEL <= 7 only Python 2 is provided. +# When not specified default to distribution provided version. 
%if %{undefined __use_python} -%if 0%{?rhel} && 0%{?rhel} <= 7 -%define __python /usr/bin/python2 -%define __python_pkg_version 2 -%define __python_cffi_pkg python-cffi -%define __python_setuptools_pkg python-setuptools -%else %define __python /usr/bin/python3 %define __python_pkg_version 3 -%define __python_cffi_pkg python3-cffi -%define __python_setuptools_pkg python3-setuptools -%endif %else %define __python %{__use_python} %define __python_pkg_version %{__use_python_pkg_version} -%define __python_cffi_pkg python%{__python_pkg_version}-cffi -%define __python_setuptools_pkg python%{__python_pkg_version}-setuptools %endif %define __python_sitelib %(%{__python} -Esc "from distutils.sysconfig import get_python_lib; print(get_python_lib())") -# By default python-pyzfs is enabled, with the exception of -# RHEL 6 which by default uses Python 2.6 which is too old. -%if 0%{?rhel} == 6 -%bcond_with pyzfs -%else -%bcond_without pyzfs -%endif - Name: @PACKAGE@ Version: @VERSION@ Release: @RELEASE@%{?dist} @@ -137,7 +101,7 @@ Obsoletes: spl <= %{version} # Renaming those on either side would conflict with all available documentation. Conflicts: zfs-fuse -%if 0%{?rhel}%{?fedora}%{?suse_version}%{?openEuler} +%if 0%{?rhel}%{?centos}%{?fedora}%{?suse_version}%{?openEuler} BuildRequires: gcc, make BuildRequires: zlib-devel BuildRequires: libuuid-devel @@ -290,7 +254,7 @@ Requires: sudo Requires: sysstat Requires: libaio Requires: python%{__python_pkg_version} -%if 0%{?rhel}%{?fedora}%{?suse_version}%{?openEuler} +%if 0%{?rhel}%{?centos}%{?fedora}%{?suse_version}%{?openEuler} BuildRequires: libaio-devel %endif AutoReqProv: no @@ -313,6 +277,8 @@ This package contains a dracut module used to construct an initramfs image which is ZFS aware. %if %{with pyzfs} +# Enforce `python36-` package prefix for CentOS 7 +# since dependencies come from EPEL and are named this way %package -n python%{__python_pkg_version}-pyzfs Summary: Python %{python_version} wrapper for libzfs_core Group: Development/Languages/Python @@ -322,16 +288,26 @@ Requires: libzfs5 = %{version}-%{release} Requires: libnvpair3 = %{version}-%{release} Requires: libffi Requires: python%{__python_pkg_version} -Requires: %{__python_cffi_pkg} -%if 0%{?rhel}%{?fedora}%{?suse_version}%{?openEuler} -%if 0%{?rhel} >= 8 || 0%{?centos} >= 8 || 0%{?fedora} >= 28 || 0%{?openEuler} -BuildRequires: python3-packaging + +%if 0%{?centos} == 7 +Requires: python36-cffi %else -BuildRequires: python-packaging +Requires: python%{__python_pkg_version}-cffi %endif + +%if 0%{?rhel}%{?centos}%{?fedora}%{?suse_version}%{?openEuler} +%if 0%{?centos} == 7 +BuildRequires: python36-packaging +BuildRequires: python36-devel +BuildRequires: python36-cffi +BuildRequires: python36-setuptools +%else +BuildRequires: python%{__python_pkg_version}-packaging BuildRequires: python%{__python_pkg_version}-devel -BuildRequires: %{__python_cffi_pkg} -BuildRequires: %{__python_setuptools_pkg} +BuildRequires: python%{__python_pkg_version}-cffi +BuildRequires: python%{__python_pkg_version}-setuptools +%endif + BuildRequires: libffi-devel %endif @@ -490,7 +466,7 @@ systemctl --system daemon-reload >/dev/null || true %{_bindir}/raidz_test %{_sbindir}/zgenhostid %{_bindir}/zvol_wait -# Optional Python 2/3 scripts +# Optional Python 3 scripts %{_bindir}/arc_summary %{_bindir}/arcstat %{_bindir}/dbufstat diff --git a/tests/test-runner/bin/test-runner.py.in b/tests/test-runner/bin/test-runner.py.in index 4e439d2b56..a652d3d4a0 100755 --- a/tests/test-runner/bin/test-runner.py.in +++ 
b/tests/test-runner/bin/test-runner.py.in @@ -15,19 +15,14 @@ # Copyright (c) 2012, 2018 by Delphix. All rights reserved. # Copyright (c) 2019 Datto Inc. # -# This script must remain compatible with Python 2.6+ and Python 3.4+. +# This script must remain compatible with Python 3.6+. # -# some python 2.7 system don't have a configparser shim -try: - import configparser -except ImportError: - import ConfigParser as configparser - import os import sys import ctypes import re +import configparser from datetime import datetime from optparse import OptionParser @@ -38,7 +33,7 @@ from subprocess import PIPE from subprocess import Popen from subprocess import check_output from threading import Timer -from time import time +from time import time, CLOCK_MONOTONIC_RAW from os.path import exists BASEDIR = '/var/tmp/test_results' @@ -52,9 +47,6 @@ LOG_OUT = 'LOG_OUT' LOG_ERR = 'LOG_ERR' LOG_FILE_OBJ = None -# some python 2.7 system don't have a concept of monotonic time -CLOCK_MONOTONIC_RAW = 4 # see - class timespec(ctypes.Structure): _fields_ = [ diff --git a/tests/test-runner/bin/zts-report.py.in b/tests/test-runner/bin/zts-report.py.in index 71b0cc8d64..432899c21f 100755 --- a/tests/test-runner/bin/zts-report.py.in +++ b/tests/test-runner/bin/zts-report.py.in @@ -15,7 +15,7 @@ # Copyright (c) 2017 by Delphix. All rights reserved. # Copyright (c) 2018 by Lawrence Livermore National Security, LLC. # -# This script must remain compatible with Python 2.6+ and Python 3.4+. +# This script must remain compatible with Python 3.6+. # import os @@ -62,13 +62,13 @@ known_reason = 'Known issue' exec_reason = 'Test user execute permissions required for utilities' # -# Some tests require a minimum python version of 3.5 and will be skipped when +# Some tests require a minimum python version of 3.6 and will be skipped when # the default system version is too old. There may also be tests which require -# additional python modules be installed, for example python-cffi is required +# additional python modules be installed, for example python3-cffi is required # by the pyzfs tests. # -python_reason = 'Python v3.5 or newer required' -python_deps_reason = 'Python modules missing: python-cffi' +python_reason = 'Python v3.6 or newer required' +python_deps_reason = 'Python modules missing: python3-cffi' # # Some tests require the O_TMPFILE flag which was first introduced in the diff --git a/tests/zfs-tests/include/commands.cfg b/tests/zfs-tests/include/commands.cfg index a565ea8d51..78802c9fb9 100644 --- a/tests/zfs-tests/include/commands.cfg +++ b/tests/zfs-tests/include/commands.cfg @@ -70,8 +70,6 @@ export SYSTEM_FILES_COMMON='arp printf ps pwd - python - python2 python3 quotaon readlink diff --git a/tests/zfs-tests/tests/functional/cli_root/zfs_program/zfs_program_json.ksh b/tests/zfs-tests/tests/functional/cli_root/zfs_program/zfs_program_json.ksh index 3788543b0b..b0265c5ee4 100755 --- a/tests/zfs-tests/tests/functional/cli_root/zfs_program/zfs_program_json.ksh +++ b/tests/zfs-tests/tests/functional/cli_root/zfs_program/zfs_program_json.ksh @@ -92,27 +92,13 @@ typeset -a pos_cmds_out=( } }") -# -# N.B. json.tool is needed to guarantee consistent ordering of fields, -# sed is needed to trim trailing space in CentOS 6's json.tool output -# -# As of Python 3.5 the behavior of json.tool changed to keep the order -# the same as the input and the --sort-keys option was added. Detect when -# --sort-keys is supported and apply the option to ensure the expected order. 
-# -if python -m json.tool --sort-keys <<< "{}"; then - JSON_TOOL_CMD="python -m json.tool --sort-keys" -else - JSON_TOOL_CMD="python -m json.tool" -fi - typeset -i cnt=0 typeset cmd for cmd in ${pos_cmds[@]}; do log_must zfs program $TESTPOOL $TESTZCP $TESTDS $cmd 2>&1 log_must zfs program -j $TESTPOOL $TESTZCP $TESTDS $cmd 2>&1 OUTPUT=$(zfs program -j $TESTPOOL $TESTZCP $TESTDS $cmd 2>&1 | - $JSON_TOOL_CMD | sed 's/[[:space:]]*$//') + python3 -m json.tool --sort-keys) if [ "$OUTPUT" != "${pos_cmds_out[$cnt]}" ]; then log_note "Got :$OUTPUT" log_note "Expected:${pos_cmds_out[$cnt]}" diff --git a/tests/zfs-tests/tests/functional/pyzfs/pyzfs_unittest.ksh.in b/tests/zfs-tests/tests/functional/pyzfs/pyzfs_unittest.ksh.in index 4ca610e5f1..1f58d8116b 100755 --- a/tests/zfs-tests/tests/functional/pyzfs/pyzfs_unittest.ksh.in +++ b/tests/zfs-tests/tests/functional/pyzfs/pyzfs_unittest.ksh.in @@ -30,7 +30,7 @@ verify_runnable "global" # Verify that the required dependencies for testing are installed. @PYTHON@ -c "import cffi" 2>/dev/null if [ $? -eq 1 ]; then - log_unsupported "python-cffi not found by Python" + log_unsupported "python3-cffi not found by Python" fi # We don't just try to "import libzfs_core" because we want to skip these tests diff --git a/tests/zfs-tests/tests/functional/rsend/send_encrypted_files.ksh b/tests/zfs-tests/tests/functional/rsend/send_encrypted_files.ksh index 370f5382eb..661fbe85db 100755 --- a/tests/zfs-tests/tests/functional/rsend/send_encrypted_files.ksh +++ b/tests/zfs-tests/tests/functional/rsend/send_encrypted_files.ksh @@ -87,7 +87,7 @@ log_must xattrtest -f 10 -x 3 -s 32768 -r -k -p /$TESTPOOL/$TESTFS2/xattrsadir # OpenZFS issue #7432 log_must zfs set compression=on xattr=sa $TESTPOOL/$TESTFS2 log_must touch /$TESTPOOL/$TESTFS2/attrs -log_must eval "python -c 'print \"a\" * 4096' | \ +log_must eval "python3 -c 'print \"a\" * 4096' | \ set_xattr_stdin bigval /$TESTPOOL/$TESTFS2/attrs" log_must zfs set compression=off xattr=on $TESTPOOL/$TESTFS2 diff --git a/tests/zfs-tests/tests/functional/rsend/send_realloc_dnode_size.ksh b/tests/zfs-tests/tests/functional/rsend/send_realloc_dnode_size.ksh index 551ed15db2..bd30488eaa 100755 --- a/tests/zfs-tests/tests/functional/rsend/send_realloc_dnode_size.ksh +++ b/tests/zfs-tests/tests/functional/rsend/send_realloc_dnode_size.ksh @@ -88,7 +88,7 @@ log_must zfs snapshot $POOL/fs@c # 4. Create an empty file and add xattrs to it to exercise reclaiming a # dnode that requires more than 1 slot for its bonus buffer (Zol #7433) log_must zfs set compression=on xattr=sa $POOL/fs -log_must eval "python -c 'print \"a\" * 512' | +log_must eval "python3 -c 'print \"a\" * 512' | set_xattr_stdin bigval /$POOL/fs/attrs" log_must zfs snapshot $POOL/fs@d