1 #!/usr/bin/env @PYTHON_SHEBANG@
3 # Print out statistics for all cached dmu buffers. This information
4 # is available through the dbufs kstat and may be post-processed as
5 # needed by the script.
9 # The contents of this file are subject to the terms of the
10 # Common Development and Distribution License, Version 1.0 only
11 # (the "License"). You may not use this file except in compliance
14 # You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
15 # or https://opensource.org/licenses/CDDL-1.0.
16 # See the License for the specific language governing permissions
17 # and limitations under the License.
19 # When distributing Covered Code, include this CDDL HEADER in each
20 # file and include the License file at usr/src/OPENSOLARIS.LICENSE.
21 # If applicable, add the following below this CDDL HEADER, with the
22 # fields enclosed by brackets "[]" replaced with your own identifying
23 # information: Portions Copyright [yyyy] [name of copyright owner]
27 # Copyright (C) 2013 Lawrence Livermore National Security, LLC.
28 # Produced at Lawrence Livermore National Laboratory (cf, DISCLAIMER).
# This script must remain compatible with Python 3.6+.
import getopt
import errno
import re
import sys

# Column sets for the three table modes.  For each mode: *hdr is the
# default column list, *xhdr the extended (-x) list, and *incompat the
# fields that cannot be combined with that mode.
bhdr = ["pool", "objset", "object", "level", "blkid", "offset", "dbsize"]
bxhdr = ["pool", "objset", "object", "level", "blkid", "offset", "dbsize",
         "usize", "meta", "state", "dbholds", "dbc", "list", "atype", "flags",
         "count", "asize", "access", "mru", "gmru", "mfu", "gmfu", "l2",
         "l2_dattr", "l2_asize", "l2_comp", "aholds", "dtype", "btype",
         "data_bs", "meta_bs", "bsize", "lvls", "dholds", "blocks", "dsize"]
bincompat = ["cached", "direct", "indirect", "bonus", "spill"]

dhdr = ["pool", "objset", "object", "dtype", "cached"]
dxhdr = ["pool", "objset", "object", "dtype", "btype", "data_bs", "meta_bs",
         "bsize", "lvls", "dholds", "blocks", "dsize", "cached", "direct",
         "indirect", "bonus", "spill"]
dincompat = ["level", "blkid", "offset", "dbsize", "usize", "meta", "state",
             "dbholds", "dbc", "list", "atype", "flags", "count", "asize",
             "access", "mru", "gmru", "mfu", "gmfu", "l2", "l2_dattr",
             "l2_asize", "l2_comp", "aholds"]

thdr = ["pool", "objset", "dtype", "cached"]
txhdr = ["pool", "objset", "dtype", "cached", "direct", "indirect",
         "bonus", "spill"]
tincompat = ["object", "level", "blkid", "offset", "dbsize", "usize", "meta",
             "state", "dbc", "dbholds", "list", "atype", "flags", "count",
             "asize", "access", "mru", "gmru", "mfu", "gmfu", "l2", "l2_dattr",
             "l2_asize", "l2_comp", "aholds", "btype", "data_bs", "meta_bs",
             "bsize", "lvls", "dholds", "blocks", "dsize"]

cols = {
    # hdr: [size, scale, description]
    "pool": [15, -1, "pool name"],
    "objset": [6, -1, "dataset identification number"],
    "object": [10, -1, "object number"],
    "level": [5, -1, "indirection level of buffer"],
    "blkid": [8, -1, "block number of buffer"],
    "offset": [12, 1024, "offset in object of buffer"],
    "dbsize": [7, 1024, "size of buffer"],
    "usize": [7, 1024, "size of attached user data"],
    "meta": [4, -1, "is this buffer metadata?"],
    "state": [5, -1, "state of buffer (read, cached, etc)"],
    "dbholds": [7, 1000, "number of holds on buffer"],
    "dbc": [3, -1, "in dbuf cache"],
    "list": [4, -1, "which ARC list contains this buffer"],
    "atype": [7, -1, "ARC header type (data or metadata)"],
    "flags": [9, -1, "ARC read flags"],
    "count": [5, -1, "ARC data count"],
    "asize": [7, 1024, "size of this ARC buffer"],
    "access": [10, -1, "time this ARC buffer was last accessed"],
    "mru": [5, 1000, "hits while on the ARC's MRU list"],
    "gmru": [5, 1000, "hits while on the ARC's MRU ghost list"],
    "mfu": [5, 1000, "hits while on the ARC's MFU list"],
    "gmfu": [5, 1000, "hits while on the ARC's MFU ghost list"],
    "l2": [5, 1000, "hits while on the L2ARC"],
    "l2_dattr": [8, -1, "L2ARC disk address/offset"],
    "l2_asize": [8, 1024, "L2ARC alloc'd size (depending on compression)"],
    "l2_comp": [21, -1, "L2ARC compression algorithm for buffer"],
    "aholds": [6, 1000, "number of holds on this ARC buffer"],
    "dtype": [27, -1, "dnode type"],
    "btype": [27, -1, "bonus buffer type"],
    "data_bs": [7, 1024, "data block size"],
    "meta_bs": [7, 1024, "metadata block size"],
    "bsize": [6, 1024, "bonus buffer size"],
    "lvls": [6, -1, "number of indirection levels"],
    "dholds": [6, 1000, "number of holds on dnode"],
    "blocks": [8, 1000, "number of allocated blocks"],
    "dsize": [12, 1024, "size of dnode"],
    "cached": [6, 1024, "bytes cached for all blocks"],
    "direct": [6, 1024, "bytes cached for direct blocks"],
    "indirect": [8, 1024, "bytes cached for indirect blocks"],
    "bonus": [5, 1024, "bytes cached for bonus buffer"],
    "spill": [5, 1024, "bytes cached for spill block"],
}

# Mutable run-time state shared by the printing helpers; set in main().
hdr = None
xhdr = None
sep = "  "  # Default separator is 2 spaces
cmd = ("Usage: dbufstat [-bdhnrtvx] [-i file] [-f fields] [-o file] "
       "[-s string] [-F filter]\n")
raw = 0
def default_ifile():
    """Return the default dbufs kstat input for this platform.

    On FreeBSD the dbufs kstat is fetched via sysctl and staged into
    sys.stdin, and "-" is returned so the caller reads from stdin.
    On Linux the procfs kstat path is returned.  Other platforms are
    unsupported and exit with an error.
    """
    if sys.platform.startswith("freebsd"):
        import io
        # Requires py-sysctl on FreeBSD
        import sysctl

        dbufs = sysctl.filter("kstat.zfs.misc.dbufs")[0].value
        sys.stdin = io.StringIO(dbufs)
        return "-"
    elif sys.platform.startswith("linux"):
        return "/proc/spl/kstat/zfs/dbufs"

    sys.stderr.write("Unsupported platform: %s\n" % sys.platform)
    sys.exit(1)
def print_incompat_helper(incompat):
    """Write the sorted incompatible-field names to stderr.

    Fields are printed comma-separated, indented with a tab, and
    wrapped onto a new line after every ninth entry.
    """
    cnt = 0
    for key in sorted(incompat):
        if cnt == 0:
            sys.stderr.write("\t")
        elif cnt > 8:
            sys.stderr.write(",\n\t")
            cnt = 0
        else:
            sys.stderr.write(", ")

        sys.stderr.write("%s" % key)
        cnt += 1

    sys.stderr.write("\n\n")
def detailed_usage():
    """Print the full field reference (-v output) to stderr and exit 0."""
    sys.stderr.write("%s\n" % cmd)

    sys.stderr.write("Field definitions incompatible with '-b' option:\n")
    print_incompat_helper(bincompat)

    sys.stderr.write("Field definitions incompatible with '-d' option:\n")
    print_incompat_helper(dincompat)

    sys.stderr.write("Field definitions incompatible with '-t' option:\n")
    print_incompat_helper(tincompat)

    sys.stderr.write("Field definitions are as follows:\n")
    for key in sorted(cols.keys()):
        sys.stderr.write("%11s : %s\n" % (key, cols[key][2]))
    sys.stderr.write("\n")

    sys.exit(0)
def usage():
    """Print the short usage summary to stderr and exit with status 1."""
    sys.stderr.write("%s\n" % cmd)
    sys.stderr.write("\t -b : Print table of information for each dbuf\n")
    sys.stderr.write("\t -d : Print table of information for each dnode\n")
    sys.stderr.write("\t -h : Print this help message\n")
    sys.stderr.write("\t -n : Exclude header from output\n")
    sys.stderr.write("\t -r : Print raw values\n")
    sys.stderr.write("\t -t : Print table of information for each dnode type"
                     "\n")
    sys.stderr.write("\t -v : List all possible field headers and definitions"
                     "\n")
    sys.stderr.write("\t -x : Print extended stats\n")
    sys.stderr.write("\t -i : Redirect input from the specified file\n")
    sys.stderr.write("\t -f : Specify specific fields to print (see -v)\n")
    sys.stderr.write("\t -o : Redirect output to the specified file\n")
    sys.stderr.write("\t -s : Override default field separator with custom "
                     "character or string\n")
    sys.stderr.write("\t -F : Filter output by value or regex\n")
    sys.stderr.write("\nExamples:\n")
    sys.stderr.write("\tdbufstat -d -o /tmp/d.log\n")
    sys.stderr.write("\tdbufstat -t -s \",\" -o /tmp/t.log\n")
    sys.stderr.write("\tdbufstat -v\n")
    sys.stderr.write("\tdbufstat -d -f pool,object,objset,dsize,cached\n")
    sys.stderr.write("\tdbufstat -bx -F dbc=1,objset=54,pool=testpool\n")
    sys.stderr.write("\n")

    sys.exit(1)
def prettynum(sz, scale, num=0):
    """Format *num* right-aligned in a field of width *sz*.

    A scale of -1 (or raw mode) prints the value unscaled.  Otherwise
    the value is repeatedly divided by *scale* and suffixed with the
    matching K/M/G/... unit; one decimal place is kept when the scaled
    value has a single integer digit.
    """
    global raw

    suffix = [' ', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']
    index = 0
    save = 0

    if raw or scale == -1:
        return "%*s" % (sz, num)
    elif 0 < num < 1:
        # Rounding error, return 0
        num = 0

    while num > scale and index < 5:
        save = num
        num = num / scale
        index += 1

    if index == 0:
        return "%*d" % (sz, num)

    if (save / scale) < 10:
        return "%*.1f%s" % (sz - 1, num, suffix[index])
    else:
        return "%*d%s" % (sz - 1, num, suffix[index])
def print_values(v):
    """Print one row of the dict *v*, formatted per the active hdr columns.

    A broken pipe (e.g. output piped into head) terminates quietly.
    """
    global hdr
    global sep

    try:
        for col in hdr:
            sys.stdout.write("%s%s" % (
                prettynum(cols[col][0], cols[col][1], v[col]), sep))
        sys.stdout.write("\n")
    except IOError as e:
        if e.errno == errno.EPIPE:
            sys.exit(1)
def print_header():
    """Print the column header row for the active hdr columns.

    A broken pipe (e.g. output piped into head) terminates quietly.
    """
    global hdr
    global sep

    try:
        for col in hdr:
            sys.stdout.write("%*s%s" % (cols[col][0], col, sep))
        sys.stdout.write("\n")
    except IOError as e:
        if e.errno == errno.EPIPE:
            sys.exit(1)
def get_typestring(t):
    """Translate a numeric dnode/bonus type to its DMU_OT* name.

    Values below the table length index the legacy DMU_OT enum table;
    larger values are looked up by exact value in the DMU_OTN
    "new-style" encoded-type map.  Unknown values yield "(UNKNOWN)".
    """
    ot_strings = [
        "DMU_OT_NONE",
        # general:
        "DMU_OT_OBJECT_DIRECTORY",
        "DMU_OT_OBJECT_ARRAY",
        "DMU_OT_PACKED_NVLIST",
        "DMU_OT_PACKED_NVLIST_SIZE",
        "DMU_OT_BPOBJ",
        "DMU_OT_BPOBJ_HDR",
        # spa:
        "DMU_OT_SPACE_MAP_HEADER",
        "DMU_OT_SPACE_MAP",
        # zil:
        "DMU_OT_INTENT_LOG",
        # dmu:
        "DMU_OT_DNODE",
        "DMU_OT_OBJSET",
        # dsl:
        "DMU_OT_DSL_DIR",
        "DMU_OT_DSL_DIR_CHILD_MAP",
        "DMU_OT_DSL_DS_SNAP_MAP",
        "DMU_OT_DSL_PROPS",
        "DMU_OT_DSL_DATASET",
        # zpl:
        "DMU_OT_ZNODE",
        "DMU_OT_OLDACL",
        "DMU_OT_PLAIN_FILE_CONTENTS",
        "DMU_OT_DIRECTORY_CONTENTS",
        "DMU_OT_MASTER_NODE",
        "DMU_OT_UNLINKED_SET",
        # zvol:
        "DMU_OT_ZVOL",
        "DMU_OT_ZVOL_PROP",
        # other; for testing only!
        "DMU_OT_PLAIN_OTHER",
        "DMU_OT_UINT64_OTHER",
        "DMU_OT_ZAP_OTHER",
        # new object types:
        "DMU_OT_ERROR_LOG",
        "DMU_OT_SPA_HISTORY",
        "DMU_OT_SPA_HISTORY_OFFSETS",
        "DMU_OT_POOL_PROPS",
        "DMU_OT_DSL_PERMS",
        "DMU_OT_ACL",
        "DMU_OT_SYSACL",
        "DMU_OT_FUID",
        "DMU_OT_FUID_SIZE",
        "DMU_OT_NEXT_CLONES",
        "DMU_OT_SCAN_QUEUE",
        "DMU_OT_USERGROUP_USED",
        "DMU_OT_USERGROUP_QUOTA",
        "DMU_OT_USERREFS",
        "DMU_OT_DDT_ZAP",
        "DMU_OT_DDT_STATS",
        "DMU_OT_SA",
        "DMU_OT_SA_MASTER_NODE",
        "DMU_OT_SA_ATTR_REGISTRATION",
        "DMU_OT_SA_ATTR_LAYOUTS",
        "DMU_OT_SCAN_XLATE",
        "DMU_OT_DEDUP",
        "DMU_OT_DEADLIST",
        "DMU_OT_DEADLIST_HDR",
        "DMU_OT_DSL_CLONES",
        "DMU_OT_BPOBJ_SUBOBJ"]

    otn_strings = {
        0x80: "DMU_OTN_UINT8_DATA",
        0xc0: "DMU_OTN_UINT8_METADATA",
        0x81: "DMU_OTN_UINT16_DATA",
        0xc1: "DMU_OTN_UINT16_METADATA",
        0x82: "DMU_OTN_UINT32_DATA",
        0xc2: "DMU_OTN_UINT32_METADATA",
        0x83: "DMU_OTN_UINT64_DATA",
        0xc3: "DMU_OTN_UINT64_METADATA",
        0x84: "DMU_OTN_ZAP_DATA",
        0xc4: "DMU_OTN_ZAP_METADATA",
        0xa0: "DMU_OTN_UINT8_ENC_DATA",
        0xe0: "DMU_OTN_UINT8_ENC_METADATA",
        0xa1: "DMU_OTN_UINT16_ENC_DATA",
        0xe1: "DMU_OTN_UINT16_ENC_METADATA",
        0xa2: "DMU_OTN_UINT32_ENC_DATA",
        0xe2: "DMU_OTN_UINT32_ENC_METADATA",
        0xa3: "DMU_OTN_UINT64_ENC_DATA",
        0xe3: "DMU_OTN_UINT64_ENC_METADATA",
        0xa4: "DMU_OTN_ZAP_ENC_DATA",
        0xe4: "DMU_OTN_ZAP_ENC_METADATA"}

    # If "-rr" option is used, don't convert to string representation
    if raw > 1:
        return "%i" % t

    try:
        if t < len(ot_strings):
            return ot_strings[t]
        else:
            return otn_strings[t]
    except (IndexError, KeyError):
        return "(UNKNOWN)"
def get_compstring(c):
    """Translate a numeric L2ARC compression value to its ZIO_COMPRESS* name.

    Out-of-range values fall back to the raw number as a string.
    """
    comp_strings = ["ZIO_COMPRESS_INHERIT", "ZIO_COMPRESS_ON",
                    "ZIO_COMPRESS_OFF", "ZIO_COMPRESS_LZJB",
                    "ZIO_COMPRESS_EMPTY", "ZIO_COMPRESS_GZIP_1",
                    "ZIO_COMPRESS_GZIP_2", "ZIO_COMPRESS_GZIP_3",
                    "ZIO_COMPRESS_GZIP_4", "ZIO_COMPRESS_GZIP_5",
                    "ZIO_COMPRESS_GZIP_6", "ZIO_COMPRESS_GZIP_7",
                    "ZIO_COMPRESS_GZIP_8", "ZIO_COMPRESS_GZIP_9",
                    "ZIO_COMPRESS_ZLE", "ZIO_COMPRESS_LZ4",
                    "ZIO_COMPRESS_ZSTD", "ZIO_COMPRESS_FUNCTION"]

    # If "-rr" option is used, don't convert to string representation
    if raw > 1:
        return "%i" % c

    try:
        return comp_strings[c]
    except IndexError:
        return "%i" % c
def parse_line(line, labels):
    """Convert one split kstat line to a dict keyed by the active hdr columns.

    *labels* maps column name -> field index within *line*.  Numeric
    type/compression codes are translated to their symbolic names;
    everything else except 'pool' and 'flags' is converted to int.
    """
    global hdr

    new = dict()
    val = None
    for col in hdr:
        # These are "special" fields computed in the update_dict
        # function, prevent KeyError exception on labels[col] for these.
        if col not in ['bonus', 'cached', 'direct', 'indirect', 'spill']:
            val = line[labels[col]]

        if col in ['pool', 'flags']:
            new[col] = str(val)
        elif col in ['dtype', 'btype']:
            new[col] = get_typestring(int(val))
        elif col in ['l2_comp']:
            new[col] = get_compstring(int(val))
        else:
            new[col] = int(val)

    return new
def update_dict(d, k, line, labels):
    """Accumulate one dbuf line into the nested dict d[pool][objset][key].

    *k* names the column ('object' or 'dtype') used as the innermost
    key.  On first sight of a key the row is seeded via parse_line()
    with zeroed aggregate counters.  The buffer size (plus user data)
    is added to 'cached', and also attributed to exactly one of
    'bonus' (blkid == -1), 'spill' (blkid == -2), 'direct' (level 0)
    or 'indirect' (level > 0).
    """
    pool = line[labels['pool']]
    objset = line[labels['objset']]
    key = line[labels[k]]

    dbsize = int(line[labels['dbsize']])
    usize = int(line[labels['usize']])
    blkid = int(line[labels['blkid']])
    level = int(line[labels['level']])

    if pool not in d:
        d[pool] = dict()

    if objset not in d[pool]:
        d[pool][objset] = dict()

    if key not in d[pool][objset]:
        d[pool][objset][key] = parse_line(line, labels)
        d[pool][objset][key]['bonus'] = 0
        d[pool][objset][key]['cached'] = 0
        d[pool][objset][key]['direct'] = 0
        d[pool][objset][key]['indirect'] = 0
        d[pool][objset][key]['spill'] = 0

    d[pool][objset][key]['cached'] += dbsize + usize

    if blkid == -1:
        d[pool][objset][key]['bonus'] += dbsize
    elif blkid == -2:
        d[pool][objset][key]['spill'] += dbsize
    else:
        if level == 0:
            d[pool][objset][key]['direct'] += dbsize
        else:
            d[pool][objset][key]['indirect'] += dbsize

    return d
def skip_line(vals, filters):
    """
    Determines if a line should be skipped during printing
    based on a set of filters
    """
    if len(filters) == 0:
        return False

    for key in vals:
        if key in filters:
            val = prettynum(cols[key][0], cols[key][1], vals[key]).strip()
            # we want a full match here
            if re.match("(?:" + filters[key] + r")\Z", val) is None:
                return True

    return False
def print_dict(d, filters, noheader):
    """Print every row of the nested pool/objset dict *d*.

    The header is suppressed when *noheader* is true, and rows
    rejected by skip_line() are omitted.
    """
    if not noheader:
        print_header()
    for pool in list(d.keys()):
        for objset in list(d[pool].keys()):
            for v in list(d[pool][objset].values()):
                if not skip_line(v, filters):
                    print_values(v)
def dnodes_build_dict(filehandle):
    """Aggregate the dbufs kstat stream into per-dnode statistics.

    Returns a nested dict keyed by pool, objset and object number.
    """
    labels = dict()
    dnodes = dict()

    # First 3 lines are header information, skip the first two
    for i in range(2):
        next(filehandle)

    # The third line contains the labels and index locations
    for i, v in enumerate(next(filehandle).split()):
        labels[v] = i

    # The rest of the file is buffer information
    for line in filehandle:
        update_dict(dnodes, 'object', line.split(), labels)

    return dnodes
def types_build_dict(filehandle):
    """Aggregate the dbufs kstat stream into per-dnode-type statistics.

    Returns a nested dict keyed by pool, objset and dnode type.
    """
    labels = dict()
    types = dict()

    # First 3 lines are header information, skip the first two
    for i in range(2):
        next(filehandle)

    # The third line contains the labels and index locations
    for i, v in enumerate(next(filehandle).split()):
        labels[v] = i

    # The rest of the file is buffer information
    for line in filehandle:
        update_dict(types, 'dtype', line.split(), labels)

    return types
def buffers_print_all(filehandle, filters, noheader):
    """Print one output row per dbuf line in the kstat stream.

    The header is suppressed when *noheader* is true, and rows
    rejected by skip_line() are omitted.
    """
    labels = dict()

    # First 3 lines are header information, skip the first two
    for i in range(2):
        next(filehandle)

    # The third line contains the labels and index locations
    for i, v in enumerate(next(filehandle).split()):
        labels[v] = i

    if not noheader:
        print_header()

    # The rest of the file is buffer information
    for line in filehandle:
        vals = parse_line(line.split(), labels)
        if not skip_line(vals, filters):
            print_values(vals)
def main():
    """Parse the command line, then print dbuf/dnode/type statistics."""
    global hdr
    global sep
    global raw

    desired_cols = None
    bflag = False
    dflag = False
    hflag = False
    ifile = None
    ofile = None
    tflag = False
    vflag = False
    xflag = False
    nflag = False

    try:
        opts, args = getopt.getopt(
            sys.argv[1:],
            "bdf:hi:o:rs:tvxF:n",
            [
                "buffers",
                "dnodes",
                "columns",
                "help",
                "infile",
                "outfile",
                "raw",
                "separator",
                "types",
                "verbose",
                "extended",
                "noheader",
                "filter"
            ]
        )
    except getopt.error:
        usage()
        opts = None

    # Filters are field-name -> regex; a row must match every filter.
    filters = {}
    for opt, arg in opts:
        if opt in ('-b', '--buffers'):
            bflag = True
        if opt in ('-d', '--dnodes'):
            dflag = True
        if opt in ('-f', '--columns'):
            desired_cols = arg
        if opt in ('-h', '--help'):
            hflag = True
        if opt in ('-i', '--infile'):
            ifile = arg
        if opt in ('-o', '--outfile'):
            ofile = arg
        if opt in ('-r', '--raw'):
            raw += 1
        if opt in ('-s', '--separator'):
            sep = arg
        if opt in ('-t', '--types'):
            tflag = True
        if opt in ('-v', '--verbose'):
            vflag = True
        if opt in ('-x', '--extended'):
            xflag = True
        if opt in ('-n', '--noheader'):
            nflag = True
        if opt in ('-F', '--filter'):
            fils = [x.strip() for x in arg.split(",")]

            for fil in fils:
                f = [x.strip() for x in fil.split("=")]

                if len(f) != 2:
                    sys.stderr.write("Invalid filter '%s'.\n" % fil)
                    sys.exit(1)

                if f[0] not in cols:
                    sys.stderr.write("Invalid field '%s' in filter.\n" % f[0])
                    sys.exit(1)

                if f[0] in filters:
                    sys.stderr.write("Field '%s' specified multiple times in "
                                     "filter.\n" % f[0])
                    sys.exit(1)

                try:
                    re.compile("(?:" + f[1] + r")\Z")
                except re.error:
                    sys.stderr.write("Invalid regex for field '%s' in "
                                     "filter.\n" % f[0])
                    sys.exit(1)

                filters[f[0]] = f[1]

    if hflag or (xflag and desired_cols):
        usage()

    if vflag:
        detailed_usage()

    # Ensure at most only one of b, d, or t flags are set
    if (bflag and dflag) or (bflag and tflag) or (dflag and tflag):
        usage()

    if bflag:
        hdr = bxhdr if xflag else bhdr
    elif tflag:
        hdr = txhdr if xflag else thdr
    else:  # Even if dflag is False, it's the default if none set
        dflag = True
        hdr = dxhdr if xflag else dhdr

    if desired_cols:
        hdr = desired_cols.split(",")

        invalid = []
        incompat = []
        for ele in hdr:
            if ele not in cols:
                invalid.append(ele)
            elif ((bflag and bincompat and ele in bincompat) or
                  (dflag and dincompat and ele in dincompat) or
                  (tflag and tincompat and ele in tincompat)):
                incompat.append(ele)

        if len(invalid) > 0:
            sys.stderr.write("Invalid column definition! -- %s\n" % invalid)
            usage()

        if len(incompat) > 0:
            sys.stderr.write("Incompatible field specified! -- %s\n" %
                             incompat)
            usage()

    if ofile:
        try:
            tmp = open(ofile, "w")
            sys.stdout = tmp
        except IOError:
            sys.stderr.write("Cannot open %s for writing\n" % ofile)
            sys.exit(1)

    if not ifile:
        ifile = default_ifile()

    if ifile != "-":
        try:
            tmp = open(ifile, "r")
            sys.stdin = tmp
        except IOError:
            sys.stderr.write("Cannot open %s for reading\n" % ifile)
            sys.exit(1)

    if bflag:
        buffers_print_all(sys.stdin, filters, nflag)

    if dflag:
        print_dict(dnodes_build_dict(sys.stdin), filters, nflag)

    if tflag:
        print_dict(types_build_dict(sys.stdin), filters, nflag)


if __name__ == '__main__':
    main()