Current File : //proc/thread-self/root/var/lib/munin/datafile |
version 2.0.73
sixpack.studio;server1.sixpack.studio:if_eth0.graph_order down up
sixpack.studio;server1.sixpack.studio:if_eth0.graph_title eth0 traffic
sixpack.studio;server1.sixpack.studio:if_eth0.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:if_eth0.graph_vlabel bits in (-) / out (+) per ${graph_period}
sixpack.studio;server1.sixpack.studio:if_eth0.graph_category network
sixpack.studio;server1.sixpack.studio:if_eth0.graph_info This graph shows the traffic of the eth0 network interface. Please note that the traffic is shown in bits per second, not bytes. IMPORTANT: On 32-bit systems the data source for this plugin uses 32-bit counters, which makes the plugin unreliable and unsuitable for most 100-Mb/s (or faster) interfaces, where traffic is expected to exceed 50 Mb/s over a 5 minute period. This means that this plugin is unsuitable for most 32-bit production environments. To avoid this problem, use the ip_ plugin instead. There should be no problems on 64-bit systems running 64-bit kernels.
sixpack.studio;server1.sixpack.studio:if_eth0.up.negative down
sixpack.studio;server1.sixpack.studio:if_eth0.up.label bps
sixpack.studio;server1.sixpack.studio:if_eth0.up.graph_data_size normal
sixpack.studio;server1.sixpack.studio:if_eth0.up.cdef up,8,*
sixpack.studio;server1.sixpack.studio:if_eth0.up.min 0
sixpack.studio;server1.sixpack.studio:if_eth0.up.type DERIVE
sixpack.studio;server1.sixpack.studio:if_eth0.up.update_rate 300
sixpack.studio;server1.sixpack.studio:if_eth0.up.info Traffic of the eth0 interface. Unable to determine interface speed. Please run the plugin as root.
sixpack.studio;server1.sixpack.studio:if_eth0.down.graph_data_size normal
sixpack.studio;server1.sixpack.studio:if_eth0.down.label received
sixpack.studio;server1.sixpack.studio:if_eth0.down.min 0
sixpack.studio;server1.sixpack.studio:if_eth0.down.cdef down,8,*
sixpack.studio;server1.sixpack.studio:if_eth0.down.type DERIVE
sixpack.studio;server1.sixpack.studio:if_eth0.down.graph no
sixpack.studio;server1.sixpack.studio:if_eth0.down.update_rate 300
sixpack.studio;server1.sixpack.studio:apache_volume.graph_title Apache volume
sixpack.studio;server1.sixpack.studio:apache_volume.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:apache_volume.graph_vlabel bytes per ${graph_period}
sixpack.studio;server1.sixpack.studio:apache_volume.graph_category apache
sixpack.studio;server1.sixpack.studio:apache_volume.graph_order volume80
sixpack.studio;server1.sixpack.studio:apache_volume.volume80.label port 80
sixpack.studio;server1.sixpack.studio:apache_volume.volume80.max 1000000000
sixpack.studio;server1.sixpack.studio:apache_volume.volume80.graph_data_size normal
sixpack.studio;server1.sixpack.studio:apache_volume.volume80.min 0
sixpack.studio;server1.sixpack.studio:apache_volume.volume80.type DERIVE
sixpack.studio;server1.sixpack.studio:apache_volume.volume80.update_rate 300
sixpack.studio;server1.sixpack.studio:forks.graph_title Fork rate
sixpack.studio;server1.sixpack.studio:forks.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:forks.graph_vlabel forks / ${graph_period}
sixpack.studio;server1.sixpack.studio:forks.graph_category processes
sixpack.studio;server1.sixpack.studio:forks.graph_info This graph shows the number of forks (new processes started) per second.
sixpack.studio;server1.sixpack.studio:forks.graph_order forks
sixpack.studio;server1.sixpack.studio:forks.forks.update_rate 300
sixpack.studio;server1.sixpack.studio:forks.forks.info The number of forks per second.
sixpack.studio;server1.sixpack.studio:forks.forks.type DERIVE
sixpack.studio;server1.sixpack.studio:forks.forks.min 0
sixpack.studio;server1.sixpack.studio:forks.forks.label forks
sixpack.studio;server1.sixpack.studio:forks.forks.max 100000
sixpack.studio;server1.sixpack.studio:forks.forks.graph_data_size normal
sixpack.studio;server1.sixpack.studio:exim_mailstats.graph_title Exim mail throughput
sixpack.studio;server1.sixpack.studio:exim_mailstats.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:exim_mailstats.graph_vlabel mails/${graph_period}
sixpack.studio;server1.sixpack.studio:exim_mailstats.graph_scale no
sixpack.studio;server1.sixpack.studio:exim_mailstats.graph_category exim
sixpack.studio;server1.sixpack.studio:exim_mailstats.graph_order received completed rejected
sixpack.studio;server1.sixpack.studio:exim_mailstats.completed.update_rate 300
sixpack.studio;server1.sixpack.studio:exim_mailstats.completed.type DERIVE
sixpack.studio;server1.sixpack.studio:exim_mailstats.completed.min 0
sixpack.studio;server1.sixpack.studio:exim_mailstats.completed.graph_data_size normal
sixpack.studio;server1.sixpack.studio:exim_mailstats.completed.label completed
sixpack.studio;server1.sixpack.studio:exim_mailstats.rejected.graph_data_size normal
sixpack.studio;server1.sixpack.studio:exim_mailstats.rejected.label rejected
sixpack.studio;server1.sixpack.studio:exim_mailstats.rejected.min 0
sixpack.studio;server1.sixpack.studio:exim_mailstats.rejected.type DERIVE
sixpack.studio;server1.sixpack.studio:exim_mailstats.rejected.update_rate 300
sixpack.studio;server1.sixpack.studio:exim_mailstats.received.min 0
sixpack.studio;server1.sixpack.studio:exim_mailstats.received.update_rate 300
sixpack.studio;server1.sixpack.studio:exim_mailstats.received.type DERIVE
sixpack.studio;server1.sixpack.studio:exim_mailstats.received.graph_data_size normal
sixpack.studio;server1.sixpack.studio:exim_mailstats.received.label received
sixpack.studio;server1.sixpack.studio:exim_mailstats.received.draw AREA
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.graph_title Disk throughput for /dev/sr0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.graph_args --base 1024
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.graph_vlabel Pr ${graph_period} read (-) / write (+)
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.graph_info This graph shows disk throughput in bytes pr ${graph_period}. The graph base is 1024 so KB is for Kibi bytes and so on.
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.graph_order rdbytes wrbytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.rdbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.rdbytes.graph no
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.rdbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.rdbytes.min 0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.rdbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.rdbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.rdbytes.label invisible
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.wrbytes.label Bytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.wrbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.wrbytes.negative rdbytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.wrbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.wrbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.wrbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0.wrbytes.min 0
sixpack.studio;server1.sixpack.studio:open_inodes.graph_title Inode table usage
sixpack.studio;server1.sixpack.studio:open_inodes.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:open_inodes.graph_vlabel number of open inodes
sixpack.studio;server1.sixpack.studio:open_inodes.graph_category system
sixpack.studio;server1.sixpack.studio:open_inodes.graph_info This graph monitors the Linux open inode table.
sixpack.studio;server1.sixpack.studio:open_inodes.graph_order used max
sixpack.studio;server1.sixpack.studio:open_inodes.max.graph_data_size normal
sixpack.studio;server1.sixpack.studio:open_inodes.max.label inode table size
sixpack.studio;server1.sixpack.studio:open_inodes.max.info The size of the system inode table. This is dynamically adjusted by the kernel.
sixpack.studio;server1.sixpack.studio:open_inodes.max.update_rate 300
sixpack.studio;server1.sixpack.studio:open_inodes.used.info The number of currently open inodes.
sixpack.studio;server1.sixpack.studio:open_inodes.used.update_rate 300
sixpack.studio;server1.sixpack.studio:open_inodes.used.graph_data_size normal
sixpack.studio;server1.sixpack.studio:open_inodes.used.label open inodes
sixpack.studio;server1.sixpack.studio:load.graph_title Load average
sixpack.studio;server1.sixpack.studio:load.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:load.graph_vlabel load
sixpack.studio;server1.sixpack.studio:load.graph_scale no
sixpack.studio;server1.sixpack.studio:load.graph_category system
sixpack.studio;server1.sixpack.studio:load.graph_info The load average of the machine describes how many processes are in the run-queue (scheduled to run "immediately").
sixpack.studio;server1.sixpack.studio:load.graph_order load
sixpack.studio;server1.sixpack.studio:load.load.info 5 minute load average
sixpack.studio;server1.sixpack.studio:load.load.update_rate 300
sixpack.studio;server1.sixpack.studio:load.load.graph_data_size normal
sixpack.studio;server1.sixpack.studio:load.load.label load
sixpack.studio;server1.sixpack.studio:mysql_queries.graph_title MySQL queries
sixpack.studio;server1.sixpack.studio:mysql_queries.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:mysql_queries.graph_vlabel queries / ${graph_period}
sixpack.studio;server1.sixpack.studio:mysql_queries.graph_category mysql
sixpack.studio;server1.sixpack.studio:mysql_queries.graph_info Note that this is an old plugin which is no longer installed by default. It is retained for compatibility with old installations.
sixpack.studio;server1.sixpack.studio:mysql_queries.graph_total total
sixpack.studio;server1.sixpack.studio:mysql_queries.graph_order select delete insert update replace cache_hits
sixpack.studio;server1.sixpack.studio:mysql_queries.insert.draw STACK
sixpack.studio;server1.sixpack.studio:mysql_queries.insert.graph_data_size normal
sixpack.studio;server1.sixpack.studio:mysql_queries.insert.max 500000
sixpack.studio;server1.sixpack.studio:mysql_queries.insert.label insert
sixpack.studio;server1.sixpack.studio:mysql_queries.insert.min 0
sixpack.studio;server1.sixpack.studio:mysql_queries.insert.type DERIVE
sixpack.studio;server1.sixpack.studio:mysql_queries.insert.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_queries.select.graph_data_size normal
sixpack.studio;server1.sixpack.studio:mysql_queries.select.label select
sixpack.studio;server1.sixpack.studio:mysql_queries.select.max 500000
sixpack.studio;server1.sixpack.studio:mysql_queries.select.draw AREA
sixpack.studio;server1.sixpack.studio:mysql_queries.select.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_queries.select.type DERIVE
sixpack.studio;server1.sixpack.studio:mysql_queries.select.min 0
sixpack.studio;server1.sixpack.studio:mysql_queries.delete.type DERIVE
sixpack.studio;server1.sixpack.studio:mysql_queries.delete.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_queries.delete.min 0
sixpack.studio;server1.sixpack.studio:mysql_queries.delete.draw STACK
sixpack.studio;server1.sixpack.studio:mysql_queries.delete.max 500000
sixpack.studio;server1.sixpack.studio:mysql_queries.delete.label delete
sixpack.studio;server1.sixpack.studio:mysql_queries.delete.graph_data_size normal
sixpack.studio;server1.sixpack.studio:mysql_queries.update.draw STACK
sixpack.studio;server1.sixpack.studio:mysql_queries.update.graph_data_size normal
sixpack.studio;server1.sixpack.studio:mysql_queries.update.label update
sixpack.studio;server1.sixpack.studio:mysql_queries.update.max 500000
sixpack.studio;server1.sixpack.studio:mysql_queries.update.type DERIVE
sixpack.studio;server1.sixpack.studio:mysql_queries.update.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_queries.update.min 0
sixpack.studio;server1.sixpack.studio:mysql_queries.cache_hits.max 500000
sixpack.studio;server1.sixpack.studio:mysql_queries.cache_hits.label cache_hits
sixpack.studio;server1.sixpack.studio:mysql_queries.cache_hits.graph_data_size normal
sixpack.studio;server1.sixpack.studio:mysql_queries.cache_hits.draw STACK
sixpack.studio;server1.sixpack.studio:mysql_queries.cache_hits.min 0
sixpack.studio;server1.sixpack.studio:mysql_queries.cache_hits.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_queries.cache_hits.type DERIVE
sixpack.studio;server1.sixpack.studio:mysql_queries.replace.draw STACK
sixpack.studio;server1.sixpack.studio:mysql_queries.replace.graph_data_size normal
sixpack.studio;server1.sixpack.studio:mysql_queries.replace.max 500000
sixpack.studio;server1.sixpack.studio:mysql_queries.replace.label replace
sixpack.studio;server1.sixpack.studio:mysql_queries.replace.min 0
sixpack.studio;server1.sixpack.studio:mysql_queries.replace.type DERIVE
sixpack.studio;server1.sixpack.studio:mysql_queries.replace.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.graph_title Disk throughput for /dev/loop0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.graph_args --base 1024
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.graph_vlabel Pr ${graph_period} read (-) / write (+)
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.graph_info This graph shows disk throughput in bytes pr ${graph_period}. The graph base is 1024 so KB is for Kibi bytes and so on.
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.graph_order rdbytes wrbytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.rdbytes.label invisible
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.rdbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.rdbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.rdbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.rdbytes.graph no
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.rdbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.rdbytes.min 0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.wrbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.wrbytes.negative rdbytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.wrbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.wrbytes.label Bytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.wrbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.wrbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0.wrbytes.min 0
sixpack.studio;server1.sixpack.studio:proc_pri.graph_title Processes priority
sixpack.studio;server1.sixpack.studio:proc_pri.graph_order low high locked
sixpack.studio;server1.sixpack.studio:proc_pri.graph_category processes
sixpack.studio;server1.sixpack.studio:proc_pri.graph_info This graph shows number of processes at each priority
sixpack.studio;server1.sixpack.studio:proc_pri.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:proc_pri.graph_vlabel Number of processes
sixpack.studio;server1.sixpack.studio:proc_pri.locked.draw STACK
sixpack.studio;server1.sixpack.studio:proc_pri.locked.graph_data_size normal
sixpack.studio;server1.sixpack.studio:proc_pri.locked.label locked in memory
sixpack.studio;server1.sixpack.studio:proc_pri.locked.info The number of processes that have pages locked into memory (for real-time and custom IO)
sixpack.studio;server1.sixpack.studio:proc_pri.locked.update_rate 300
sixpack.studio;server1.sixpack.studio:proc_pri.high.info The number of high-priority processes (tasks)
sixpack.studio;server1.sixpack.studio:proc_pri.high.update_rate 300
sixpack.studio;server1.sixpack.studio:proc_pri.high.graph_data_size normal
sixpack.studio;server1.sixpack.studio:proc_pri.high.label high priority
sixpack.studio;server1.sixpack.studio:proc_pri.high.draw STACK
sixpack.studio;server1.sixpack.studio:proc_pri.low.graph_data_size normal
sixpack.studio;server1.sixpack.studio:proc_pri.low.label low priority
sixpack.studio;server1.sixpack.studio:proc_pri.low.draw AREA
sixpack.studio;server1.sixpack.studio:proc_pri.low.info The number of low-priority processes (tasks)
sixpack.studio;server1.sixpack.studio:proc_pri.low.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.graph_title Throughput per device
sixpack.studio;server1.sixpack.studio:diskstats_throughput.graph_args --base 1024
sixpack.studio;server1.sixpack.studio:diskstats_throughput.graph_vlabel Bytes/${graph_period} read (-) / write (+)
sixpack.studio;server1.sixpack.studio:diskstats_throughput.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_throughput.graph_width 400
sixpack.studio;server1.sixpack.studio:diskstats_throughput.graph_info This graph shows averaged throughput for the given disk in bytes. Higher throughput is usually linked with higher service time/latency (separate graph). The graph base is 1024 yielding Kibi- and Mebi-bytes.
sixpack.studio;server1.sixpack.studio:diskstats_throughput.graph_order loop0_rdbytes loop0_wrbytes sr0_rdbytes sr0_wrbytes vda_rdbytes vda_wrbytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_wrbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_wrbytes.negative sr0_rdbytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_wrbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_wrbytes.label sr0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_wrbytes.min 0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_wrbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_wrbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_wrbytes.min 0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_wrbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_wrbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_wrbytes.negative vda_rdbytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_wrbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_wrbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_wrbytes.label vda
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_rdbytes.min 0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_rdbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_rdbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_rdbytes.graph no
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_rdbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_rdbytes.label loop0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_rdbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_rdbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_rdbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_rdbytes.graph no
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_rdbytes.min 0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_rdbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_rdbytes.label vda
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda_rdbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_wrbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_wrbytes.label loop0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_wrbytes.negative loop0_rdbytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_wrbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_wrbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_wrbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.loop0_wrbytes.min 0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_rdbytes.min 0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_rdbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_rdbytes.graph no
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_rdbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_rdbytes.label sr0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_rdbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.sr0_rdbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:interrupts.graph_title Interrupts and context switches
sixpack.studio;server1.sixpack.studio:interrupts.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:interrupts.graph_vlabel interrupts & ctx switches / ${graph_period}
sixpack.studio;server1.sixpack.studio:interrupts.graph_category system
sixpack.studio;server1.sixpack.studio:interrupts.graph_info This graph shows the number of interrupts and context switches on the system. These are typically high on a busy system.
sixpack.studio;server1.sixpack.studio:interrupts.graph_order intr ctx
sixpack.studio;server1.sixpack.studio:interrupts.intr.label interrupts
sixpack.studio;server1.sixpack.studio:interrupts.intr.graph_data_size normal
sixpack.studio;server1.sixpack.studio:interrupts.intr.type DERIVE
sixpack.studio;server1.sixpack.studio:interrupts.intr.update_rate 300
sixpack.studio;server1.sixpack.studio:interrupts.intr.info Interrupts are events that alter sequence of instructions executed by a processor. They can come from either hardware (exceptions, NMI, IRQ) or software.
sixpack.studio;server1.sixpack.studio:interrupts.intr.min 0
sixpack.studio;server1.sixpack.studio:interrupts.ctx.label context switches
sixpack.studio;server1.sixpack.studio:interrupts.ctx.graph_data_size normal
sixpack.studio;server1.sixpack.studio:interrupts.ctx.update_rate 300
sixpack.studio;server1.sixpack.studio:interrupts.ctx.info A context switch occurs when a multitasking operating system suspends the currently running process, and starts executing another.
sixpack.studio;server1.sixpack.studio:interrupts.ctx.type DERIVE
sixpack.studio;server1.sixpack.studio:interrupts.ctx.min 0
sixpack.studio;server1.sixpack.studio:uptime.graph_title Uptime
sixpack.studio;server1.sixpack.studio:uptime.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:uptime.graph_scale no
sixpack.studio;server1.sixpack.studio:uptime.graph_vlabel uptime in days
sixpack.studio;server1.sixpack.studio:uptime.graph_category system
sixpack.studio;server1.sixpack.studio:uptime.graph_order uptime
sixpack.studio;server1.sixpack.studio:uptime.uptime.update_rate 300
sixpack.studio;server1.sixpack.studio:uptime.uptime.label uptime
sixpack.studio;server1.sixpack.studio:uptime.uptime.graph_data_size normal
sixpack.studio;server1.sixpack.studio:uptime.uptime.draw AREA
sixpack.studio;server1.sixpack.studio:processes.graph_title Processes
sixpack.studio;server1.sixpack.studio:processes.graph_info This graph shows the number of processes
sixpack.studio;server1.sixpack.studio:processes.graph_category processes
sixpack.studio;server1.sixpack.studio:processes.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:processes.graph_vlabel Number of processes
sixpack.studio;server1.sixpack.studio:processes.graph_order sleeping idle stopped zombie dead paging uninterruptible runnable processes
sixpack.studio;server1.sixpack.studio:processes.zombie.colour 990000
sixpack.studio;server1.sixpack.studio:processes.zombie.graph_data_size normal
sixpack.studio;server1.sixpack.studio:processes.zombie.label zombie
sixpack.studio;server1.sixpack.studio:processes.zombie.draw STACK
sixpack.studio;server1.sixpack.studio:processes.zombie.info The number of defunct ('zombie') processes (process terminated and parent not waiting).
sixpack.studio;server1.sixpack.studio:processes.zombie.update_rate 300
sixpack.studio;server1.sixpack.studio:processes.paging.graph_data_size normal
sixpack.studio;server1.sixpack.studio:processes.paging.label paging
sixpack.studio;server1.sixpack.studio:processes.paging.draw STACK
sixpack.studio;server1.sixpack.studio:processes.paging.colour 00aaaa
sixpack.studio;server1.sixpack.studio:processes.paging.info The number of paging processes (<2.6 kernels only).
sixpack.studio;server1.sixpack.studio:processes.paging.update_rate 300
sixpack.studio;server1.sixpack.studio:processes.dead.draw STACK
sixpack.studio;server1.sixpack.studio:processes.dead.label dead
sixpack.studio;server1.sixpack.studio:processes.dead.graph_data_size normal
sixpack.studio;server1.sixpack.studio:processes.dead.colour ff0000
sixpack.studio;server1.sixpack.studio:processes.dead.update_rate 300
sixpack.studio;server1.sixpack.studio:processes.dead.info The number of dead processes.
sixpack.studio;server1.sixpack.studio:processes.stopped.info The number of stopped or traced processes.
sixpack.studio;server1.sixpack.studio:processes.stopped.update_rate 300
sixpack.studio;server1.sixpack.studio:processes.stopped.colour cc0000
sixpack.studio;server1.sixpack.studio:processes.stopped.graph_data_size normal
sixpack.studio;server1.sixpack.studio:processes.stopped.label stopped
sixpack.studio;server1.sixpack.studio:processes.stopped.draw STACK
sixpack.studio;server1.sixpack.studio:processes.idle.info The number of idle kernel threads (>= 4.2 kernels only).
sixpack.studio;server1.sixpack.studio:processes.idle.update_rate 300
sixpack.studio;server1.sixpack.studio:processes.idle.draw STACK
sixpack.studio;server1.sixpack.studio:processes.idle.graph_data_size normal
sixpack.studio;server1.sixpack.studio:processes.idle.label idle
sixpack.studio;server1.sixpack.studio:processes.idle.colour 4169e1
sixpack.studio;server1.sixpack.studio:processes.runnable.colour 22ff22
sixpack.studio;server1.sixpack.studio:processes.runnable.label runnable
sixpack.studio;server1.sixpack.studio:processes.runnable.graph_data_size normal
sixpack.studio;server1.sixpack.studio:processes.runnable.draw STACK
sixpack.studio;server1.sixpack.studio:processes.runnable.update_rate 300
sixpack.studio;server1.sixpack.studio:processes.runnable.info The number of runnable processes (on the run queue).
sixpack.studio;server1.sixpack.studio:processes.uninterruptible.draw STACK
sixpack.studio;server1.sixpack.studio:processes.uninterruptible.graph_data_size normal
sixpack.studio;server1.sixpack.studio:processes.uninterruptible.label uninterruptible
sixpack.studio;server1.sixpack.studio:processes.uninterruptible.colour ffa500
sixpack.studio;server1.sixpack.studio:processes.uninterruptible.info The number of uninterruptible processes (usually IO).
sixpack.studio;server1.sixpack.studio:processes.uninterruptible.update_rate 300
sixpack.studio;server1.sixpack.studio:processes.processes.update_rate 300
sixpack.studio;server1.sixpack.studio:processes.processes.info The total number of processes.
sixpack.studio;server1.sixpack.studio:processes.processes.label total
sixpack.studio;server1.sixpack.studio:processes.processes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:processes.processes.draw LINE1
sixpack.studio;server1.sixpack.studio:processes.processes.colour c0c0c0
sixpack.studio;server1.sixpack.studio:processes.sleeping.info The number of sleeping processes.
sixpack.studio;server1.sixpack.studio:processes.sleeping.update_rate 300
sixpack.studio;server1.sixpack.studio:processes.sleeping.draw AREA
sixpack.studio;server1.sixpack.studio:processes.sleeping.graph_data_size normal
sixpack.studio;server1.sixpack.studio:processes.sleeping.label sleeping
sixpack.studio;server1.sixpack.studio:processes.sleeping.colour 0022ff
sixpack.studio;server1.sixpack.studio:irqstats.graph_title Individual interrupts
sixpack.studio;server1.sixpack.studio:irqstats.graph_args --base 1000 --logarithmic
sixpack.studio;server1.sixpack.studio:irqstats.graph_vlabel interrupts / ${graph_period}
sixpack.studio;server1.sixpack.studio:irqstats.graph_category system
sixpack.studio;server1.sixpack.studio:irqstats.graph_info Shows the number of different IRQs received by the kernel. High disk or network traffic can cause a high number of interrupts (with good hardware and drivers this will be less so). Sudden high interrupt activity with no associated higher system activity is not normal.
sixpack.studio;server1.sixpack.studio:irqstats.graph_order i0 i1 i4 i8 i9 i11 i12 i14 i15 i24 i25 i26 i27 i28 i29 i30 iNMI iLOC iSPU iPMI iIWI iRTR iRES iCAL iTLB iTRM iTHR iDFR iMCE iMCP iHYP iHRE iHVS iERR iMIS iPIN iNPI iPIW
sixpack.studio;server1.sixpack.studio:irqstats.iTRM.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iTRM.label Thermal event interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iTRM.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iTRM.info Interrupt TRM, for device(s): Thermal event interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iTRM.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iTRM.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i28.info Interrupt 28, for device(s): 49152-edge virtio0-config
sixpack.studio;server1.sixpack.studio:irqstats.i28.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i28.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i28.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i28.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i28.label 49152-edge virtio0-config
sixpack.studio;server1.sixpack.studio:irqstats.i9.label 9-fasteoi acpi
sixpack.studio;server1.sixpack.studio:irqstats.i9.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i9.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i9.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i9.info Interrupt 9, for device(s): 9-fasteoi acpi
sixpack.studio;server1.sixpack.studio:irqstats.i9.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i25.label 81921-edge virtio2-req.0
sixpack.studio;server1.sixpack.studio:irqstats.i25.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i25.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i25.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i25.info Interrupt 25, for device(s): 81921-edge virtio2-req.0
sixpack.studio;server1.sixpack.studio:irqstats.i25.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i27.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i27.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i27.info Interrupt 27, for device(s): 65537-edge virtio1-virtqueues
sixpack.studio;server1.sixpack.studio:irqstats.i27.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i27.label 65537-edge virtio1-virtqueues
sixpack.studio;server1.sixpack.studio:irqstats.i27.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i14.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i14.info Interrupt 14, for device(s): 14-edge ata_piix
sixpack.studio;server1.sixpack.studio:irqstats.i14.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i14.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i14.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i14.label 14-edge ata_piix
sixpack.studio;server1.sixpack.studio:irqstats.i4.label 4-edge ttyS0
sixpack.studio;server1.sixpack.studio:irqstats.i4.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i4.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i4.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i4.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i4.info Interrupt 4, for device(s): 4-edge ttyS0
sixpack.studio;server1.sixpack.studio:irqstats.iDFR.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iDFR.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iDFR.info Interrupt DFR, for device(s): Deferred Error APIC interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iDFR.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iDFR.label Deferred Error APIC interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iDFR.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iTLB.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iTLB.info Interrupt TLB, for device(s): TLB shootdowns
sixpack.studio;server1.sixpack.studio:irqstats.iTLB.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iTLB.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iTLB.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iTLB.label TLB shootdowns
sixpack.studio;server1.sixpack.studio:irqstats.iCAL.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iCAL.label Function call interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iCAL.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iCAL.info Interrupt CAL, for device(s): Function call interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iCAL.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iCAL.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iERR.label ERR
sixpack.studio;server1.sixpack.studio:irqstats.iERR.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iERR.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iERR.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iERR.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iNMI.label Non-maskable interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iNMI.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iNMI.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iNMI.info Interrupt NMI, for device(s): Non-maskable interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iNMI.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iNMI.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i11.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i11.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i11.info Interrupt 11, for device(s): 11-fasteoi uhci_hcd:usb1, virtio3
sixpack.studio;server1.sixpack.studio:irqstats.i11.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i11.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i11.label 11-fasteoi uhci_hcd:usb1, virtio3
sixpack.studio;server1.sixpack.studio:irqstats.iMCE.label Machine check exceptions
sixpack.studio;server1.sixpack.studio:irqstats.iMCE.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iMCE.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iMCE.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iMCE.info Interrupt MCE, for device(s): Machine check exceptions
sixpack.studio;server1.sixpack.studio:irqstats.iMCE.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iRTR.label APIC ICR read retries
sixpack.studio;server1.sixpack.studio:irqstats.iRTR.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iRTR.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iRTR.info Interrupt RTR, for device(s): APIC ICR read retries
sixpack.studio;server1.sixpack.studio:irqstats.iRTR.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iRTR.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iPIN.label Posted-interrupt notification event
sixpack.studio;server1.sixpack.studio:irqstats.iPIN.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iPIN.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iPIN.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iPIN.info Interrupt PIN, for device(s): Posted-interrupt notification event
sixpack.studio;server1.sixpack.studio:irqstats.iPIN.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iNPI.label Nested posted-interrupt event
sixpack.studio;server1.sixpack.studio:irqstats.iNPI.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iNPI.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iNPI.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iNPI.info Interrupt NPI, for device(s): Nested posted-interrupt event
sixpack.studio;server1.sixpack.studio:irqstats.iNPI.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iTHR.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iTHR.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iTHR.info Interrupt THR, for device(s): Threshold APIC interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iTHR.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iTHR.label Threshold APIC interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iTHR.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i0.info Interrupt 0, for device(s): 2-edge timer
sixpack.studio;server1.sixpack.studio:irqstats.i0.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i0.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i0.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i0.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i0.label 2-edge timer
sixpack.studio;server1.sixpack.studio:irqstats.iPMI.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iPMI.info Interrupt PMI, for device(s): Performance monitoring interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iPMI.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iPMI.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iPMI.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iPMI.label Performance monitoring interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iPIW.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iPIW.info Interrupt PIW, for device(s): Posted-interrupt wakeup event
sixpack.studio;server1.sixpack.studio:irqstats.iPIW.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iPIW.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iPIW.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iPIW.label Posted-interrupt wakeup event
sixpack.studio;server1.sixpack.studio:irqstats.iHRE.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iHRE.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iHRE.info Interrupt HRE, for device(s): Hyper-V reenlightenment interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iHRE.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iHRE.label Hyper-V reenlightenment interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iHRE.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iMIS.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iMIS.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iMIS.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iMIS.label MIS
sixpack.studio;server1.sixpack.studio:irqstats.iMIS.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iSPU.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iSPU.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iSPU.info Interrupt SPU, for device(s): Spurious interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iSPU.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iSPU.label Spurious interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iSPU.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i15.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i15.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i15.info Interrupt 15, for device(s): 15-edge ata_piix
sixpack.studio;server1.sixpack.studio:irqstats.i15.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i15.label 15-edge ata_piix
sixpack.studio;server1.sixpack.studio:irqstats.i15.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iHYP.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iHYP.info Interrupt HYP, for device(s): Hypervisor callback interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iHYP.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iHYP.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iHYP.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iHYP.label Hypervisor callback interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iMCP.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iMCP.label Machine check polls
sixpack.studio;server1.sixpack.studio:irqstats.iMCP.info Interrupt MCP, for device(s): Machine check polls
sixpack.studio;server1.sixpack.studio:irqstats.iMCP.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iMCP.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iMCP.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i12.label 12-edge i8042
sixpack.studio;server1.sixpack.studio:irqstats.i12.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i12.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i12.info Interrupt 12, for device(s): 12-edge i8042
sixpack.studio;server1.sixpack.studio:irqstats.i12.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i12.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i8.label 8-edge rtc0
sixpack.studio;server1.sixpack.studio:irqstats.i8.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i8.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i8.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i8.info Interrupt 8, for device(s): 8-edge rtc0
sixpack.studio;server1.sixpack.studio:irqstats.i8.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i24.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i24.info Interrupt 24, for device(s): 81920-edge virtio2-config
sixpack.studio;server1.sixpack.studio:irqstats.i24.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i24.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i24.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i24.label 81920-edge virtio2-config
sixpack.studio;server1.sixpack.studio:irqstats.i30.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i30.info Interrupt 30, for device(s): 49154-edge virtio0-output.0
sixpack.studio;server1.sixpack.studio:irqstats.i30.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i30.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i30.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i30.label 49154-edge virtio0-output.0
sixpack.studio;server1.sixpack.studio:irqstats.iHVS.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iHVS.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iHVS.info Interrupt HVS, for device(s): Hyper-V stimer0 interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iHVS.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iHVS.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iHVS.label Hyper-V stimer0 interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iRES.label Rescheduling interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iRES.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iRES.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iRES.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iRES.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iRES.info Interrupt RES, for device(s): Rescheduling interrupts
sixpack.studio;server1.sixpack.studio:irqstats.i29.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i29.label 49153-edge virtio0-input.0
sixpack.studio;server1.sixpack.studio:irqstats.i29.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i29.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i29.info Interrupt 29, for device(s): 49153-edge virtio0-input.0
sixpack.studio;server1.sixpack.studio:irqstats.i29.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i1.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i1.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i1.info Interrupt 1, for device(s): 1-edge i8042
sixpack.studio;server1.sixpack.studio:irqstats.i1.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i1.label 1-edge i8042
sixpack.studio;server1.sixpack.studio:irqstats.i1.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iIWI.label IRQ work interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iIWI.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iIWI.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iIWI.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iIWI.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iIWI.info Interrupt IWI, for device(s): IRQ work interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iLOC.label Local timer interrupts
sixpack.studio;server1.sixpack.studio:irqstats.iLOC.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.iLOC.min 0
sixpack.studio;server1.sixpack.studio:irqstats.iLOC.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.iLOC.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.iLOC.info Interrupt LOC, for device(s): Local timer interrupts
sixpack.studio;server1.sixpack.studio:irqstats.i26.label 65536-edge virtio1-config
sixpack.studio;server1.sixpack.studio:irqstats.i26.graph_data_size normal
sixpack.studio;server1.sixpack.studio:irqstats.i26.min 0
sixpack.studio;server1.sixpack.studio:irqstats.i26.type DERIVE
sixpack.studio;server1.sixpack.studio:irqstats.i26.update_rate 300
sixpack.studio;server1.sixpack.studio:irqstats.i26.info Interrupt 26, for device(s): 65536-edge virtio1-config
sixpack.studio;server1.sixpack.studio:entropy.graph_title Available entropy
sixpack.studio;server1.sixpack.studio:entropy.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:entropy.graph_vlabel entropy (bytes)
sixpack.studio;server1.sixpack.studio:entropy.graph_scale no
sixpack.studio;server1.sixpack.studio:entropy.graph_category system
sixpack.studio;server1.sixpack.studio:entropy.graph_info This graph shows the amount of entropy available in the system.
sixpack.studio;server1.sixpack.studio:entropy.graph_order entropy
sixpack.studio;server1.sixpack.studio:entropy.entropy.info The number of random bytes available. This is typically used by cryptographic applications.
sixpack.studio;server1.sixpack.studio:entropy.entropy.update_rate 300
sixpack.studio;server1.sixpack.studio:entropy.entropy.graph_data_size normal
sixpack.studio;server1.sixpack.studio:entropy.entropy.label entropy
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.graph_title Disk throughput for /dev/vda
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.graph_args --base 1024
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.graph_vlabel Pr ${graph_period} read (-) / write (+)
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.graph_info This graph shows disk throughput in bytes pr ${graph_period}. The graph base is 1024 so KB is for Kibi bytes and so on.
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.graph_order rdbytes wrbytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.wrbytes.min 0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.wrbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.wrbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.wrbytes.label Bytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.wrbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.wrbytes.negative rdbytes
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.wrbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.rdbytes.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.rdbytes.label invisible
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.rdbytes.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.rdbytes.min 0
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.rdbytes.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.rdbytes.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_throughput.vda.rdbytes.graph no
sixpack.studio;server1.sixpack.studio:threads.graph_title Number of threads
sixpack.studio;server1.sixpack.studio:threads.graph_vlabel number of threads
sixpack.studio;server1.sixpack.studio:threads.graph_category processes
sixpack.studio;server1.sixpack.studio:threads.graph_info This graph shows the number of threads.
sixpack.studio;server1.sixpack.studio:threads.graph_order threads
sixpack.studio;server1.sixpack.studio:threads.threads.info The current number of threads.
sixpack.studio;server1.sixpack.studio:threads.threads.update_rate 300
sixpack.studio;server1.sixpack.studio:threads.threads.graph_data_size normal
sixpack.studio;server1.sixpack.studio:threads.threads.label threads
sixpack.studio;server1.sixpack.studio:netstat.graph_title Netstat, combined
sixpack.studio;server1.sixpack.studio:netstat.graph_args --units=si -l 1 --base 1000 --logarithmic
sixpack.studio;server1.sixpack.studio:netstat.graph_vlabel TCP connections
sixpack.studio;server1.sixpack.studio:netstat.graph_category network
sixpack.studio;server1.sixpack.studio:netstat.graph_period second
sixpack.studio;server1.sixpack.studio:netstat.graph_info This graph shows the TCP activity of all the network interfaces combined.
sixpack.studio;server1.sixpack.studio:netstat.graph_order active passive failed resets established
sixpack.studio;server1.sixpack.studio:netstat.passive.min 0
sixpack.studio;server1.sixpack.studio:netstat.passive.type DERIVE
sixpack.studio;server1.sixpack.studio:netstat.passive.info The number of passive TCP openings per second.
sixpack.studio;server1.sixpack.studio:netstat.passive.update_rate 300
sixpack.studio;server1.sixpack.studio:netstat.passive.graph_data_size normal
sixpack.studio;server1.sixpack.studio:netstat.passive.label passive
sixpack.studio;server1.sixpack.studio:netstat.passive.max 50000
sixpack.studio;server1.sixpack.studio:netstat.active.max 50000
sixpack.studio;server1.sixpack.studio:netstat.active.label active
sixpack.studio;server1.sixpack.studio:netstat.active.graph_data_size normal
sixpack.studio;server1.sixpack.studio:netstat.active.update_rate 300
sixpack.studio;server1.sixpack.studio:netstat.active.info The number of active TCP openings per second.
sixpack.studio;server1.sixpack.studio:netstat.active.type DERIVE
sixpack.studio;server1.sixpack.studio:netstat.active.min 0
sixpack.studio;server1.sixpack.studio:netstat.failed.graph_data_size normal
sixpack.studio;server1.sixpack.studio:netstat.failed.max 50000
sixpack.studio;server1.sixpack.studio:netstat.failed.label failed
sixpack.studio;server1.sixpack.studio:netstat.failed.info The number of failed TCP connection attempts per second.
sixpack.studio;server1.sixpack.studio:netstat.failed.update_rate 300
sixpack.studio;server1.sixpack.studio:netstat.failed.type DERIVE
sixpack.studio;server1.sixpack.studio:netstat.failed.min 0
sixpack.studio;server1.sixpack.studio:netstat.established.type GAUGE
sixpack.studio;server1.sixpack.studio:netstat.established.update_rate 300
sixpack.studio;server1.sixpack.studio:netstat.established.info The number of currently open connections.
sixpack.studio;server1.sixpack.studio:netstat.established.label established
sixpack.studio;server1.sixpack.studio:netstat.established.graph_data_size normal
sixpack.studio;server1.sixpack.studio:netstat.resets.max 50000
sixpack.studio;server1.sixpack.studio:netstat.resets.label resets
sixpack.studio;server1.sixpack.studio:netstat.resets.graph_data_size normal
sixpack.studio;server1.sixpack.studio:netstat.resets.update_rate 300
sixpack.studio;server1.sixpack.studio:netstat.resets.info The number of TCP connection resets.
sixpack.studio;server1.sixpack.studio:netstat.resets.type DERIVE
sixpack.studio;server1.sixpack.studio:netstat.resets.min 0
sixpack.studio;server1.sixpack.studio:apache_processes.graph_title Apache processes
sixpack.studio;server1.sixpack.studio:apache_processes.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:apache_processes.graph_category apache
sixpack.studio;server1.sixpack.studio:apache_processes.graph_order busy80 idle80 busy80 idle80 free80
sixpack.studio;server1.sixpack.studio:apache_processes.graph_vlabel processes
sixpack.studio;server1.sixpack.studio:apache_processes.graph_total total
sixpack.studio;server1.sixpack.studio:apache_processes.free80.colour ccff00
sixpack.studio;server1.sixpack.studio:apache_processes.free80.label free slots 80
sixpack.studio;server1.sixpack.studio:apache_processes.free80.graph_data_size normal
sixpack.studio;server1.sixpack.studio:apache_processes.free80.draw STACK
sixpack.studio;server1.sixpack.studio:apache_processes.free80.update_rate 300
sixpack.studio;server1.sixpack.studio:apache_processes.busy80.colour 33cc00
sixpack.studio;server1.sixpack.studio:apache_processes.busy80.label busy servers 80
sixpack.studio;server1.sixpack.studio:apache_processes.busy80.graph_data_size normal
sixpack.studio;server1.sixpack.studio:apache_processes.busy80.draw AREA
sixpack.studio;server1.sixpack.studio:apache_processes.busy80.update_rate 300
sixpack.studio;server1.sixpack.studio:apache_processes.idle80.graph_data_size normal
sixpack.studio;server1.sixpack.studio:apache_processes.idle80.label idle servers 80
sixpack.studio;server1.sixpack.studio:apache_processes.idle80.draw STACK
sixpack.studio;server1.sixpack.studio:apache_processes.idle80.colour 0033ff
sixpack.studio;server1.sixpack.studio:apache_processes.idle80.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_bytes.graph_title MySQL throughput
sixpack.studio;server1.sixpack.studio:mysql_bytes.graph_args --base 1024
sixpack.studio;server1.sixpack.studio:mysql_bytes.graph_vlabel bytes received (-) / sent (+) per ${graph_period}
sixpack.studio;server1.sixpack.studio:mysql_bytes.graph_info Note that this is a old plugin which is no longer installed by default. It is retained for compatability with old installations.
sixpack.studio;server1.sixpack.studio:mysql_bytes.graph_category mysql
sixpack.studio;server1.sixpack.studio:mysql_bytes.graph_order recv sent
sixpack.studio;server1.sixpack.studio:mysql_bytes.sent.graph_data_size normal
sixpack.studio;server1.sixpack.studio:mysql_bytes.sent.label transfer rate
sixpack.studio;server1.sixpack.studio:mysql_bytes.sent.max 80000000
sixpack.studio;server1.sixpack.studio:mysql_bytes.sent.negative recv
sixpack.studio;server1.sixpack.studio:mysql_bytes.sent.draw LINE2
sixpack.studio;server1.sixpack.studio:mysql_bytes.sent.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_bytes.sent.type DERIVE
sixpack.studio;server1.sixpack.studio:mysql_bytes.sent.min 0
sixpack.studio;server1.sixpack.studio:mysql_bytes.recv.draw LINE2
sixpack.studio;server1.sixpack.studio:mysql_bytes.recv.label transfer rate
sixpack.studio;server1.sixpack.studio:mysql_bytes.recv.max 80000000
sixpack.studio;server1.sixpack.studio:mysql_bytes.recv.graph_data_size normal
sixpack.studio;server1.sixpack.studio:mysql_bytes.recv.type DERIVE
sixpack.studio;server1.sixpack.studio:mysql_bytes.recv.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_bytes.recv.graph no
sixpack.studio;server1.sixpack.studio:mysql_bytes.recv.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.graph_title Disk IOs per device
sixpack.studio;server1.sixpack.studio:diskstats_iops.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:diskstats_iops.graph_vlabel IOs/${graph_period} read (-) / write (+)
sixpack.studio;server1.sixpack.studio:diskstats_iops.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_iops.graph_width 400
sixpack.studio;server1.sixpack.studio:diskstats_iops.graph_order loop0_rdio loop0_wrio sr0_rdio sr0_wrio vda_rdio vda_wrio
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_wrio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_wrio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_wrio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_wrio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_wrio.label vda
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_wrio.negative vda_rdio
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_wrio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_rdio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_rdio.label loop0
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_rdio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_rdio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_rdio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_rdio.graph no
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_rdio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_wrio.label loop0
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_wrio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_wrio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_wrio.negative loop0_rdio
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_wrio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_wrio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0_wrio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_wrio.label sr0
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_wrio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_wrio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_wrio.negative sr0_rdio
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_wrio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_wrio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_wrio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_rdio.graph no
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_rdio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_rdio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_rdio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_rdio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_rdio.label sr0
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0_rdio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_rdio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_rdio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_rdio.label vda
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_rdio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_rdio.graph no
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_rdio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda_rdio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.graph_title Disk utilization for /dev/sr0
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.graph_args --base 1000 --lower-limit 0 --upper-limit 100 --rigid
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.graph_vlabel % busy
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.graph_scale no
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.graph_order util
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.util.min 0
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.util.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.util.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.util.info Utilization of the device in percent. If the time spent for I/O is close to 1000msec for a given second, the device is nearly 100% saturated.
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.util.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.util.label Utilization
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0.util.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.graph_title Disk utilization for /dev/loop0
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.graph_args --base 1000 --lower-limit 0 --upper-limit 100 --rigid
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.graph_vlabel % busy
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.graph_scale no
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.graph_order util
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.util.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.util.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.util.info Utilization of the device in percent. If the time spent for I/O is close to 1000msec for a given second, the device is nearly 100% saturated.
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.util.min 0
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.util.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.util.label Utilization
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0.util.graph_data_size normal
sixpack.studio;server1.sixpack.studio:users.graph_title Logged in users
sixpack.studio;server1.sixpack.studio:users.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:users.graph_vlabel Users
sixpack.studio;server1.sixpack.studio:users.graph_scale no
sixpack.studio;server1.sixpack.studio:users.graph_category system
sixpack.studio;server1.sixpack.studio:users.graph_printf %3.0lf
sixpack.studio;server1.sixpack.studio:users.graph_order tty pty pts X other
sixpack.studio;server1.sixpack.studio:users.other.graph_data_size normal
sixpack.studio;server1.sixpack.studio:users.other.label Other users
sixpack.studio;server1.sixpack.studio:users.other.colour FF0000
sixpack.studio;server1.sixpack.studio:users.other.info Users logged in by indeterminate method
sixpack.studio;server1.sixpack.studio:users.other.update_rate 300
sixpack.studio;server1.sixpack.studio:users.tty.draw AREASTACK
sixpack.studio;server1.sixpack.studio:users.tty.graph_data_size normal
sixpack.studio;server1.sixpack.studio:users.tty.label tty
sixpack.studio;server1.sixpack.studio:users.tty.colour 00FF00
sixpack.studio;server1.sixpack.studio:users.tty.update_rate 300
sixpack.studio;server1.sixpack.studio:users.pts.update_rate 300
sixpack.studio;server1.sixpack.studio:users.pts.colour 00FFFF
sixpack.studio;server1.sixpack.studio:users.pts.graph_data_size normal
sixpack.studio;server1.sixpack.studio:users.pts.label pts
sixpack.studio;server1.sixpack.studio:users.pts.draw AREASTACK
sixpack.studio;server1.sixpack.studio:users.X.update_rate 300
sixpack.studio;server1.sixpack.studio:users.X.info Users logged in on an X display
sixpack.studio;server1.sixpack.studio:users.X.draw AREASTACK
sixpack.studio;server1.sixpack.studio:users.X.label X displays
sixpack.studio;server1.sixpack.studio:users.X.graph_data_size normal
sixpack.studio;server1.sixpack.studio:users.X.colour 000000
sixpack.studio;server1.sixpack.studio:users.pty.graph_data_size normal
sixpack.studio;server1.sixpack.studio:users.pty.label pty
sixpack.studio;server1.sixpack.studio:users.pty.draw AREASTACK
sixpack.studio;server1.sixpack.studio:users.pty.colour 0000FF
sixpack.studio;server1.sixpack.studio:users.pty.update_rate 300
sixpack.studio;server1.sixpack.studio:netstat_established.graph_title Netstat, established only
sixpack.studio;server1.sixpack.studio:netstat_established.graph_args --lower-limit 0
sixpack.studio;server1.sixpack.studio:netstat_established.graph_vlabel TCP connections
sixpack.studio;server1.sixpack.studio:netstat_established.graph_category network
sixpack.studio;server1.sixpack.studio:netstat_established.graph_period second
sixpack.studio;server1.sixpack.studio:netstat_established.graph_info This graph shows the TCP activity of all the network interfaces combined.
sixpack.studio;server1.sixpack.studio:netstat_established.graph_order established
sixpack.studio;server1.sixpack.studio:netstat_established.established.graph_data_size normal
sixpack.studio;server1.sixpack.studio:netstat_established.established.label established
sixpack.studio;server1.sixpack.studio:netstat_established.established.type GAUGE
sixpack.studio;server1.sixpack.studio:netstat_established.established.info The number of currently open connections.
sixpack.studio;server1.sixpack.studio:netstat_established.established.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.graph_title MySQL slow queries
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.graph_vlabel slow queries / ${graph_period}
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.graph_category mysql
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.graph_info Note that this is a old plugin which is no longer installed by default. It is retained for compatability with old installations.
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.graph_order queries
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.queries.graph_data_size normal
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.queries.label slow queries
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.queries.max 500000
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.queries.min 0
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.queries.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_slowqueries.queries.type DERIVE
sixpack.studio;server1.sixpack.studio:vmstat.graph_title VMstat
sixpack.studio;server1.sixpack.studio:vmstat.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:vmstat.graph_vlabel process states
sixpack.studio;server1.sixpack.studio:vmstat.graph_category processes
sixpack.studio;server1.sixpack.studio:vmstat.graph_order wait sleep
sixpack.studio;server1.sixpack.studio:vmstat.wait.label running
sixpack.studio;server1.sixpack.studio:vmstat.wait.max 500000
sixpack.studio;server1.sixpack.studio:vmstat.wait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:vmstat.wait.update_rate 300
sixpack.studio;server1.sixpack.studio:vmstat.wait.type GAUGE
sixpack.studio;server1.sixpack.studio:vmstat.sleep.update_rate 300
sixpack.studio;server1.sixpack.studio:vmstat.sleep.type GAUGE
sixpack.studio;server1.sixpack.studio:vmstat.sleep.graph_data_size normal
sixpack.studio;server1.sixpack.studio:vmstat.sleep.label I/O sleep
sixpack.studio;server1.sixpack.studio:vmstat.sleep.max 500000
sixpack.studio;server1.sixpack.studio:exim_mailqueue.graph_title Exim Mailqueue
sixpack.studio;server1.sixpack.studio:exim_mailqueue.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:exim_mailqueue.graph_vlabel mails in queue
sixpack.studio;server1.sixpack.studio:exim_mailqueue.graph_category exim
sixpack.studio;server1.sixpack.studio:exim_mailqueue.graph_order mails frozen
sixpack.studio;server1.sixpack.studio:exim_mailqueue.frozen.graph_data_size normal
sixpack.studio;server1.sixpack.studio:exim_mailqueue.frozen.label frozen mails
sixpack.studio;server1.sixpack.studio:exim_mailqueue.frozen.warning 0:100
sixpack.studio;server1.sixpack.studio:exim_mailqueue.frozen.draw STACK
sixpack.studio;server1.sixpack.studio:exim_mailqueue.frozen.critical 0:200
sixpack.studio;server1.sixpack.studio:exim_mailqueue.frozen.colour 0022FF
sixpack.studio;server1.sixpack.studio:exim_mailqueue.frozen.update_rate 300
sixpack.studio;server1.sixpack.studio:exim_mailqueue.mails.colour 00AA00
sixpack.studio;server1.sixpack.studio:exim_mailqueue.mails.critical 0:200
sixpack.studio;server1.sixpack.studio:exim_mailqueue.mails.draw AREA
sixpack.studio;server1.sixpack.studio:exim_mailqueue.mails.warning 0:100
sixpack.studio;server1.sixpack.studio:exim_mailqueue.mails.label queued mails
sixpack.studio;server1.sixpack.studio:exim_mailqueue.mails.graph_data_size normal
sixpack.studio;server1.sixpack.studio:exim_mailqueue.mails.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.graph_title IOs for /dev/vda
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.graph_vlabel Units read (-) / write (+)
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.graph_info This graph shows the number of IO operations pr second and the average size of these requests. Lots of small requests should result in in lower throughput (separate graph) and higher service time (separate graph). Please note that starting with munin-node 2.0 the divisor for K is 1000 instead of 1024 which it was prior to 2.0 beta 3. This is because the base for this graph is 1000 not 1024.
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.graph_order rdio wrio avgrdrqsz avgwrrqsz
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.wrio.negative rdio
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.wrio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.wrio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.wrio.label IO/sec
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.wrio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.wrio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.wrio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgrdrqsz.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgrdrqsz.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgrdrqsz.graph no
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgrdrqsz.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgrdrqsz.label dummy
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgrdrqsz.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgrdrqsz.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgwrrqsz.negative avgrdrqsz
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgwrrqsz.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgwrrqsz.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgwrrqsz.label Req Size (KB)
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgwrrqsz.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgwrrqsz.info Average Request Size in kilobytes (1000 based)
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgwrrqsz.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.avgwrrqsz.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.rdio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.rdio.graph no
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.rdio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.rdio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.rdio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.rdio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.vda.rdio.label dummy
sixpack.studio;server1.sixpack.studio:open_files.graph_title File table usage
sixpack.studio;server1.sixpack.studio:open_files.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:open_files.graph_vlabel number of open files
sixpack.studio;server1.sixpack.studio:open_files.graph_category system
sixpack.studio;server1.sixpack.studio:open_files.graph_info This graph monitors the Linux open files table.
sixpack.studio;server1.sixpack.studio:open_files.graph_order used
sixpack.studio;server1.sixpack.studio:open_files.used.critical 589987
sixpack.studio;server1.sixpack.studio:open_files.used.graph_data_size normal
sixpack.studio;server1.sixpack.studio:open_files.used.warning 553865
sixpack.studio;server1.sixpack.studio:open_files.used.label open files
sixpack.studio;server1.sixpack.studio:open_files.used.info The number of currently open files.
sixpack.studio;server1.sixpack.studio:open_files.used.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_utilization.graph_title Utilization per device
sixpack.studio;server1.sixpack.studio:diskstats_utilization.graph_args --base 1000 --lower-limit 0 --upper-limit 100 --rigid
sixpack.studio;server1.sixpack.studio:diskstats_utilization.graph_vlabel % busy
sixpack.studio;server1.sixpack.studio:diskstats_utilization.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_utilization.graph_width 400
sixpack.studio;server1.sixpack.studio:diskstats_utilization.graph_scale no
sixpack.studio;server1.sixpack.studio:diskstats_utilization.graph_order loop0_util sr0_util vda_util
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0_util.info Utilization of the device
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0_util.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0_util.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0_util.min 0
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0_util.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0_util.label sr0
sixpack.studio;server1.sixpack.studio:diskstats_utilization.sr0_util.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda_util.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda_util.label vda
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda_util.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda_util.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda_util.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda_util.info Utilization of the device
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda_util.min 0
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0_util.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0_util.label loop0
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0_util.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0_util.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0_util.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0_util.info Utilization of the device
sixpack.studio;server1.sixpack.studio:diskstats_utilization.loop0_util.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.graph_title Average latency for /dev/loop0
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.graph_args --base 1000 --logarithmic
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.graph_vlabel seconds
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.graph_info This graph shows average waiting time/latency for different categories of disk operations. The times that include the queue times indicate how busy your system is. If the waiting time hits 1 second then your I/O system is 100% busy.
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.graph_order svctm avgwait avgrdwait avgwrwait
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwrwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwrwait.info Average wait time for a write I/O from request start to finish (includes queue times et al)
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwrwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwrwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwrwait.label Write IO Wait time
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwrwait.warning 0:3
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwrwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwrwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.svctm.label Device IO time
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.svctm.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.svctm.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.svctm.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.svctm.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.svctm.info Average time an I/O takes on the block device not including any queue times, just the round trip time for the disk request.
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.svctm.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwait.label IO Wait time
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwait.info Average wait time for an I/O from request start to finish (includes queue times et al)
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgrdwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgrdwait.label Read IO Wait time
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgrdwait.warning 0:3
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgrdwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgrdwait.info Average wait time for a read I/O from request start to finish (includes queue times et al)
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgrdwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgrdwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0.avgrdwait.min 0
sixpack.studio;server1.sixpack.studio:memory.graph_args --base 1024 -l 0 --upper-limit 6225563648
sixpack.studio;server1.sixpack.studio:memory.graph_vlabel Bytes
sixpack.studio;server1.sixpack.studio:memory.graph_title Memory usage
sixpack.studio;server1.sixpack.studio:memory.graph_category system
sixpack.studio;server1.sixpack.studio:memory.graph_info This graph shows what the machine uses memory for.
sixpack.studio;server1.sixpack.studio:memory.graph_order apps page_tables per_cpu swap_cache slab shmem cached buffers free swap apps buffers swap cached free shmem slab swap_cache page_tables per_cpu vmalloc_used committed mapped active inactive
sixpack.studio;server1.sixpack.studio:memory.inactive.draw LINE2
sixpack.studio;server1.sixpack.studio:memory.inactive.label inactive
sixpack.studio;server1.sixpack.studio:memory.inactive.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.inactive.colour COLOUR15
sixpack.studio;server1.sixpack.studio:memory.inactive.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.inactive.info Memory not currently used.
sixpack.studio;server1.sixpack.studio:memory.active.info Memory recently used. Not reclaimed unless absolutely necessary.
sixpack.studio;server1.sixpack.studio:memory.active.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.active.draw LINE2
sixpack.studio;server1.sixpack.studio:memory.active.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.active.label active
sixpack.studio;server1.sixpack.studio:memory.active.colour COLOUR12
sixpack.studio;server1.sixpack.studio:memory.swap_cache.info A piece of memory that keeps track of pages that have been fetched from swap but not yet been modified.
sixpack.studio;server1.sixpack.studio:memory.swap_cache.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.swap_cache.draw STACK
sixpack.studio;server1.sixpack.studio:memory.swap_cache.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.swap_cache.label swap_cache
sixpack.studio;server1.sixpack.studio:memory.swap_cache.colour COLOUR2
sixpack.studio;server1.sixpack.studio:memory.cached.colour COLOUR4
sixpack.studio;server1.sixpack.studio:memory.cached.draw STACK
sixpack.studio;server1.sixpack.studio:memory.cached.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.cached.label cache
sixpack.studio;server1.sixpack.studio:memory.cached.info Parked file data (file content) cache.
sixpack.studio;server1.sixpack.studio:memory.cached.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.apps.info Memory used by user-space applications.
sixpack.studio;server1.sixpack.studio:memory.apps.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.apps.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.apps.label apps
sixpack.studio;server1.sixpack.studio:memory.apps.draw AREA
sixpack.studio;server1.sixpack.studio:memory.apps.colour COLOUR0
sixpack.studio;server1.sixpack.studio:memory.committed.colour COLOUR10
sixpack.studio;server1.sixpack.studio:memory.committed.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.committed.label committed
sixpack.studio;server1.sixpack.studio:memory.committed.draw LINE2
sixpack.studio;server1.sixpack.studio:memory.committed.info The amount of memory allocated to programs. Overcommitting is normal, but may indicate memory leaks.
sixpack.studio;server1.sixpack.studio:memory.committed.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.swap.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.swap.info Swap space used.
sixpack.studio;server1.sixpack.studio:memory.swap.colour COLOUR7
sixpack.studio;server1.sixpack.studio:memory.swap.draw STACK
sixpack.studio;server1.sixpack.studio:memory.swap.label swap
sixpack.studio;server1.sixpack.studio:memory.swap.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.page_tables.info Memory used to map between virtual and physical memory addresses.
sixpack.studio;server1.sixpack.studio:memory.page_tables.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.page_tables.colour COLOUR1
sixpack.studio;server1.sixpack.studio:memory.page_tables.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.page_tables.label page_tables
sixpack.studio;server1.sixpack.studio:memory.page_tables.draw STACK
sixpack.studio;server1.sixpack.studio:memory.buffers.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.buffers.info Block device (e.g. harddisk) cache. Also where "dirty" blocks are stored until written.
sixpack.studio;server1.sixpack.studio:memory.buffers.colour COLOUR5
sixpack.studio;server1.sixpack.studio:memory.buffers.draw STACK
sixpack.studio;server1.sixpack.studio:memory.buffers.label buffers
sixpack.studio;server1.sixpack.studio:memory.buffers.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.mapped.info All mmap()ed pages.
sixpack.studio;server1.sixpack.studio:memory.mapped.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.mapped.colour COLOUR11
sixpack.studio;server1.sixpack.studio:memory.mapped.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.mapped.label mapped
sixpack.studio;server1.sixpack.studio:memory.mapped.draw LINE2
sixpack.studio;server1.sixpack.studio:memory.shmem.draw STACK
sixpack.studio;server1.sixpack.studio:memory.shmem.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.shmem.label shmem
sixpack.studio;server1.sixpack.studio:memory.shmem.colour COLOUR9
sixpack.studio;server1.sixpack.studio:memory.shmem.info Shared Memory (SYSV SHM segments, tmpfs).
sixpack.studio;server1.sixpack.studio:memory.shmem.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.per_cpu.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.per_cpu.label per_cpu
sixpack.studio;server1.sixpack.studio:memory.per_cpu.draw STACK
sixpack.studio;server1.sixpack.studio:memory.per_cpu.colour COLOUR20
sixpack.studio;server1.sixpack.studio:memory.per_cpu.info Per CPU allocations
sixpack.studio;server1.sixpack.studio:memory.per_cpu.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.vmalloc_used.info 'VMalloc' (kernel) memory used
sixpack.studio;server1.sixpack.studio:memory.vmalloc_used.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.vmalloc_used.draw LINE2
sixpack.studio;server1.sixpack.studio:memory.vmalloc_used.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.vmalloc_used.label vmalloc_used
sixpack.studio;server1.sixpack.studio:memory.vmalloc_used.colour COLOUR8
sixpack.studio;server1.sixpack.studio:memory.free.label unused
sixpack.studio;server1.sixpack.studio:memory.free.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.free.draw STACK
sixpack.studio;server1.sixpack.studio:memory.free.colour COLOUR6
sixpack.studio;server1.sixpack.studio:memory.free.update_rate 300
sixpack.studio;server1.sixpack.studio:memory.free.info Wasted memory. Memory that is not used for anything at all.
sixpack.studio;server1.sixpack.studio:memory.slab.colour COLOUR3
sixpack.studio;server1.sixpack.studio:memory.slab.draw STACK
sixpack.studio;server1.sixpack.studio:memory.slab.graph_data_size normal
sixpack.studio;server1.sixpack.studio:memory.slab.label slab_cache
sixpack.studio;server1.sixpack.studio:memory.slab.info Memory used by the kernel (major users are caches like inode, dentry, etc).
sixpack.studio;server1.sixpack.studio:memory.slab.update_rate 300
sixpack.studio;server1.sixpack.studio:fw_packets.graph_title Firewall Throughput
sixpack.studio;server1.sixpack.studio:fw_packets.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:fw_packets.graph_vlabel Packets/${graph_period}
sixpack.studio;server1.sixpack.studio:fw_packets.graph_category network
sixpack.studio;server1.sixpack.studio:fw_packets.graph_order received forwarded
sixpack.studio;server1.sixpack.studio:fw_packets.forwarded.draw LINE2
sixpack.studio;server1.sixpack.studio:fw_packets.forwarded.label Forwarded
sixpack.studio;server1.sixpack.studio:fw_packets.forwarded.graph_data_size normal
sixpack.studio;server1.sixpack.studio:fw_packets.forwarded.min 0
sixpack.studio;server1.sixpack.studio:fw_packets.forwarded.type DERIVE
sixpack.studio;server1.sixpack.studio:fw_packets.forwarded.update_rate 300
sixpack.studio;server1.sixpack.studio:fw_packets.received.min 0
sixpack.studio;server1.sixpack.studio:fw_packets.received.update_rate 300
sixpack.studio;server1.sixpack.studio:fw_packets.received.type DERIVE
sixpack.studio;server1.sixpack.studio:fw_packets.received.graph_data_size normal
sixpack.studio;server1.sixpack.studio:fw_packets.received.label Received
sixpack.studio;server1.sixpack.studio:fw_packets.received.draw AREA
sixpack.studio;server1.sixpack.studio:df_inode.graph_title Inode usage in percent
sixpack.studio;server1.sixpack.studio:df_inode.graph_args --upper-limit 100 -l 0
sixpack.studio;server1.sixpack.studio:df_inode.graph_vlabel %
sixpack.studio;server1.sixpack.studio:df_inode.graph_scale no
sixpack.studio;server1.sixpack.studio:df_inode.graph_category disk
sixpack.studio;server1.sixpack.studio:df_inode.graph_order devtmpfs _dev_shm _run _sys_fs_cgroup _dev_vda2 _dev_loop0 _run_user_0 _run_user_978
sixpack.studio;server1.sixpack.studio:df_inode._run_user_978.critical 98
sixpack.studio;server1.sixpack.studio:df_inode._run_user_978.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df_inode._run_user_978.warning 92
sixpack.studio;server1.sixpack.studio:df_inode._run_user_978.label /run/user/978
sixpack.studio;server1.sixpack.studio:df_inode._run_user_978.update_rate 300
sixpack.studio;server1.sixpack.studio:df_inode._dev_loop0.update_rate 300
sixpack.studio;server1.sixpack.studio:df_inode._dev_loop0.critical 98
sixpack.studio;server1.sixpack.studio:df_inode._dev_loop0.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df_inode._dev_loop0.warning 92
sixpack.studio;server1.sixpack.studio:df_inode._dev_loop0.label /tmp
sixpack.studio;server1.sixpack.studio:df_inode._dev_vda2.update_rate 300
sixpack.studio;server1.sixpack.studio:df_inode._dev_vda2.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df_inode._dev_vda2.warning 92
sixpack.studio;server1.sixpack.studio:df_inode._dev_vda2.label /
sixpack.studio;server1.sixpack.studio:df_inode._dev_vda2.critical 98
sixpack.studio;server1.sixpack.studio:df_inode.devtmpfs.update_rate 300
sixpack.studio;server1.sixpack.studio:df_inode.devtmpfs.label /dev
sixpack.studio;server1.sixpack.studio:df_inode.devtmpfs.warning 92
sixpack.studio;server1.sixpack.studio:df_inode.devtmpfs.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df_inode.devtmpfs.critical 98
sixpack.studio;server1.sixpack.studio:df_inode._run.update_rate 300
sixpack.studio;server1.sixpack.studio:df_inode._run.critical 98
sixpack.studio;server1.sixpack.studio:df_inode._run.warning 92
sixpack.studio;server1.sixpack.studio:df_inode._run.label /run
sixpack.studio;server1.sixpack.studio:df_inode._run.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df_inode._dev_shm.update_rate 300
sixpack.studio;server1.sixpack.studio:df_inode._dev_shm.warning 92
sixpack.studio;server1.sixpack.studio:df_inode._dev_shm.label /dev/shm
sixpack.studio;server1.sixpack.studio:df_inode._dev_shm.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df_inode._dev_shm.critical 98
sixpack.studio;server1.sixpack.studio:df_inode._sys_fs_cgroup.critical 98
sixpack.studio;server1.sixpack.studio:df_inode._sys_fs_cgroup.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df_inode._sys_fs_cgroup.label /sys/fs/cgroup
sixpack.studio;server1.sixpack.studio:df_inode._sys_fs_cgroup.warning 92
sixpack.studio;server1.sixpack.studio:df_inode._sys_fs_cgroup.update_rate 300
sixpack.studio;server1.sixpack.studio:df_inode._run_user_0.update_rate 300
sixpack.studio;server1.sixpack.studio:df_inode._run_user_0.label /run/user/0
sixpack.studio;server1.sixpack.studio:df_inode._run_user_0.warning 92
sixpack.studio;server1.sixpack.studio:df_inode._run_user_0.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df_inode._run_user_0.critical 98
sixpack.studio;server1.sixpack.studio:http_loadtime.graph_title HTTP loadtime of a page
sixpack.studio;server1.sixpack.studio:http_loadtime.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:http_loadtime.graph_vlabel Load time in seconds
sixpack.studio;server1.sixpack.studio:http_loadtime.graph_category network
sixpack.studio;server1.sixpack.studio:http_loadtime.graph_info This graph shows the load time in seconds
sixpack.studio;server1.sixpack.studio:http_loadtime.graph_order http___localhost_
sixpack.studio;server1.sixpack.studio:http_loadtime.http___localhost_.info page load time
sixpack.studio;server1.sixpack.studio:http_loadtime.http___localhost_.update_rate 300
sixpack.studio;server1.sixpack.studio:http_loadtime.http___localhost_.graph_data_size normal
sixpack.studio;server1.sixpack.studio:http_loadtime.http___localhost_.label http://localhost/
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.graph_title Average latency for /dev/sr0
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.graph_args --base 1000 --logarithmic
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.graph_vlabel seconds
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.graph_info This graph shows average waiting time/latency for different categories of disk operations. The times that include the queue times indicate how busy your system is. If the waiting time hits 1 second then your I/O system is 100% busy.
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.graph_order svctm avgwait avgrdwait avgwrwait
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwrwait.info Average wait time for a write I/O from request start to finish (includes queue times et al)
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwrwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwrwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwrwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwrwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwrwait.warning 0:3
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwrwait.label Write IO Wait time
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwrwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwait.label IO Wait time
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwait.info Average wait time for an I/O from request start to finish (includes queue times et al)
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.svctm.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.svctm.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.svctm.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.svctm.info Average time an I/O takes on the block device not including any queue times, just the round trip time for the disk request.
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.svctm.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.svctm.label Device IO time
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.svctm.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgrdwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgrdwait.label Read IO Wait time
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgrdwait.warning 0:3
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgrdwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgrdwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgrdwait.info Average wait time for a read I/O from request start to finish (includes queue times et al)
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgrdwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0.avgrdwait.type GAUGE
sixpack.studio;server1.sixpack.studio:cpu.graph_title CPU usage
sixpack.studio;server1.sixpack.studio:cpu.graph_order system user nice idle iowait irq softirq system user nice idle iowait irq softirq steal guest
sixpack.studio;server1.sixpack.studio:cpu.graph_args --base 1000 -r --lower-limit 0 --upper-limit 400
sixpack.studio;server1.sixpack.studio:cpu.graph_vlabel %
sixpack.studio;server1.sixpack.studio:cpu.graph_scale no
sixpack.studio;server1.sixpack.studio:cpu.graph_info This graph shows how CPU time is spent.
sixpack.studio;server1.sixpack.studio:cpu.graph_category system
sixpack.studio;server1.sixpack.studio:cpu.graph_period second
sixpack.studio;server1.sixpack.studio:cpu.nice.min 0
sixpack.studio;server1.sixpack.studio:cpu.nice.update_rate 300
sixpack.studio;server1.sixpack.studio:cpu.nice.info CPU time spent by nice(1)d programs
sixpack.studio;server1.sixpack.studio:cpu.nice.type DERIVE
sixpack.studio;server1.sixpack.studio:cpu.nice.label nice
sixpack.studio;server1.sixpack.studio:cpu.nice.graph_data_size normal
sixpack.studio;server1.sixpack.studio:cpu.nice.draw STACK
sixpack.studio;server1.sixpack.studio:cpu.user.draw STACK
sixpack.studio;server1.sixpack.studio:cpu.user.label user
sixpack.studio;server1.sixpack.studio:cpu.user.graph_data_size normal
sixpack.studio;server1.sixpack.studio:cpu.user.type DERIVE
sixpack.studio;server1.sixpack.studio:cpu.user.update_rate 300
sixpack.studio;server1.sixpack.studio:cpu.user.info CPU time spent by normal programs and daemons
sixpack.studio;server1.sixpack.studio:cpu.user.min 0
sixpack.studio;server1.sixpack.studio:cpu.irq.draw STACK
sixpack.studio;server1.sixpack.studio:cpu.irq.graph_data_size normal
sixpack.studio;server1.sixpack.studio:cpu.irq.label irq
sixpack.studio;server1.sixpack.studio:cpu.irq.min 0
sixpack.studio;server1.sixpack.studio:cpu.irq.type DERIVE
sixpack.studio;server1.sixpack.studio:cpu.irq.info CPU time spent handling interrupts
sixpack.studio;server1.sixpack.studio:cpu.irq.update_rate 300
sixpack.studio;server1.sixpack.studio:cpu.iowait.draw STACK
sixpack.studio;server1.sixpack.studio:cpu.iowait.label iowait
sixpack.studio;server1.sixpack.studio:cpu.iowait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:cpu.iowait.min 0
sixpack.studio;server1.sixpack.studio:cpu.iowait.type DERIVE
sixpack.studio;server1.sixpack.studio:cpu.iowait.update_rate 300
sixpack.studio;server1.sixpack.studio:cpu.iowait.info CPU time spent waiting for I/O operations to finish when there is nothing else to do.
sixpack.studio;server1.sixpack.studio:cpu.system.graph_data_size normal
sixpack.studio;server1.sixpack.studio:cpu.system.label system
sixpack.studio;server1.sixpack.studio:cpu.system.draw AREA
sixpack.studio;server1.sixpack.studio:cpu.system.info CPU time spent by the kernel in system activities
sixpack.studio;server1.sixpack.studio:cpu.system.update_rate 300
sixpack.studio;server1.sixpack.studio:cpu.system.type DERIVE
sixpack.studio;server1.sixpack.studio:cpu.system.min 0
sixpack.studio;server1.sixpack.studio:cpu.softirq.draw STACK
sixpack.studio;server1.sixpack.studio:cpu.softirq.label softirq
sixpack.studio;server1.sixpack.studio:cpu.softirq.graph_data_size normal
sixpack.studio;server1.sixpack.studio:cpu.softirq.type DERIVE
sixpack.studio;server1.sixpack.studio:cpu.softirq.update_rate 300
sixpack.studio;server1.sixpack.studio:cpu.softirq.info CPU time spent handling "batched" interrupts
sixpack.studio;server1.sixpack.studio:cpu.softirq.min 0
sixpack.studio;server1.sixpack.studio:cpu.idle.update_rate 300
sixpack.studio;server1.sixpack.studio:cpu.idle.info Idle CPU time
sixpack.studio;server1.sixpack.studio:cpu.idle.type DERIVE
sixpack.studio;server1.sixpack.studio:cpu.idle.min 0
sixpack.studio;server1.sixpack.studio:cpu.idle.label idle
sixpack.studio;server1.sixpack.studio:cpu.idle.graph_data_size normal
sixpack.studio;server1.sixpack.studio:cpu.idle.draw STACK
sixpack.studio;server1.sixpack.studio:cpu.guest.label guest
sixpack.studio;server1.sixpack.studio:cpu.guest.graph_data_size normal
sixpack.studio;server1.sixpack.studio:cpu.guest.draw STACK
sixpack.studio;server1.sixpack.studio:cpu.guest.update_rate 300
sixpack.studio;server1.sixpack.studio:cpu.guest.info The time spent running a virtual CPU for guest operating systems under the control of the Linux kernel.
sixpack.studio;server1.sixpack.studio:cpu.guest.type DERIVE
sixpack.studio;server1.sixpack.studio:cpu.guest.min 0
sixpack.studio;server1.sixpack.studio:cpu.steal.draw STACK
sixpack.studio;server1.sixpack.studio:cpu.steal.graph_data_size normal
sixpack.studio;server1.sixpack.studio:cpu.steal.label steal
sixpack.studio;server1.sixpack.studio:cpu.steal.type DERIVE
sixpack.studio;server1.sixpack.studio:cpu.steal.info The time that a virtual CPU had runnable tasks, but the virtual CPU itself was not running
sixpack.studio;server1.sixpack.studio:cpu.steal.update_rate 300
sixpack.studio;server1.sixpack.studio:cpu.steal.min 0
sixpack.studio;server1.sixpack.studio:munin_stats.graph_title Munin processing time
sixpack.studio;server1.sixpack.studio:munin_stats.graph_info This graph shows the run time of the four different processes making up a munin-master run. Munin-master is run from cron every 5 minutes and we want each of the programmes in munin-master to complete before the next instance starts. Especially munin-update and munin-graph are time consuming and their run time bears watching. If munin-update uses too long time to run please see the munin-update graph to determine which host is slowing it down. If munin-graph is running too slow you need to get clever (email the munin-users mailing list) unless you can buy a faster computer with better disks to run munin on.
sixpack.studio;server1.sixpack.studio:munin_stats.graph_args --base 1000 -l 0
sixpack.studio;server1.sixpack.studio:munin_stats.graph_scale yes
sixpack.studio;server1.sixpack.studio:munin_stats.graph_vlabel seconds
sixpack.studio;server1.sixpack.studio:munin_stats.graph_category munin
sixpack.studio;server1.sixpack.studio:munin_stats.graph_order update graph html limits
sixpack.studio;server1.sixpack.studio:munin_stats.graph.label munin graph
sixpack.studio;server1.sixpack.studio:munin_stats.graph.warning 240
sixpack.studio;server1.sixpack.studio:munin_stats.graph.graph_data_size normal
sixpack.studio;server1.sixpack.studio:munin_stats.graph.draw AREASTACK
sixpack.studio;server1.sixpack.studio:munin_stats.graph.critical 285
sixpack.studio;server1.sixpack.studio:munin_stats.graph.update_rate 300
sixpack.studio;server1.sixpack.studio:munin_stats.limits.update_rate 300
sixpack.studio;server1.sixpack.studio:munin_stats.limits.draw AREASTACK
sixpack.studio;server1.sixpack.studio:munin_stats.limits.label munin limits
sixpack.studio;server1.sixpack.studio:munin_stats.limits.graph_data_size normal
sixpack.studio;server1.sixpack.studio:munin_stats.html.label munin html
sixpack.studio;server1.sixpack.studio:munin_stats.html.graph_data_size normal
sixpack.studio;server1.sixpack.studio:munin_stats.html.draw AREASTACK
sixpack.studio;server1.sixpack.studio:munin_stats.html.update_rate 300
sixpack.studio;server1.sixpack.studio:munin_stats.update.update_rate 300
sixpack.studio;server1.sixpack.studio:munin_stats.update.critical 285
sixpack.studio;server1.sixpack.studio:munin_stats.update.draw AREASTACK
sixpack.studio;server1.sixpack.studio:munin_stats.update.warning 240
sixpack.studio;server1.sixpack.studio:munin_stats.update.label munin update
sixpack.studio;server1.sixpack.studio:munin_stats.update.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.graph_title Disk utilization for /dev/vda
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.graph_args --base 1000 --lower-limit 0 --upper-limit 100 --rigid
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.graph_vlabel % busy
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.graph_scale no
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.graph_order util
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.util.min 0
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.util.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.util.info Utilization of the device in percent. If the time spent for I/O is close to 1000msec for a given second, the device is nearly 100% saturated.
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.util.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.util.label Utilization
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.util.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_utilization.vda.util.draw LINE1
sixpack.studio;server1.sixpack.studio:swap.graph_title Swap in/out
sixpack.studio;server1.sixpack.studio:swap.graph_args -l 0 --base 1000
sixpack.studio;server1.sixpack.studio:swap.graph_vlabel pages per ${graph_period} in (-) / out (+)
sixpack.studio;server1.sixpack.studio:swap.graph_category system
sixpack.studio;server1.sixpack.studio:swap.graph_order swap_in swap_out
sixpack.studio;server1.sixpack.studio:swap.swap_in.label swap
sixpack.studio;server1.sixpack.studio:swap.swap_in.max 100000
sixpack.studio;server1.sixpack.studio:swap.swap_in.graph_data_size normal
sixpack.studio;server1.sixpack.studio:swap.swap_in.min 0
sixpack.studio;server1.sixpack.studio:swap.swap_in.update_rate 300
sixpack.studio;server1.sixpack.studio:swap.swap_in.graph no
sixpack.studio;server1.sixpack.studio:swap.swap_in.type DERIVE
sixpack.studio;server1.sixpack.studio:swap.swap_out.update_rate 300
sixpack.studio;server1.sixpack.studio:swap.swap_out.type DERIVE
sixpack.studio;server1.sixpack.studio:swap.swap_out.min 0
sixpack.studio;server1.sixpack.studio:swap.swap_out.graph_data_size normal
sixpack.studio;server1.sixpack.studio:swap.swap_out.max 100000
sixpack.studio;server1.sixpack.studio:swap.swap_out.label swap
sixpack.studio;server1.sixpack.studio:swap.swap_out.negative swap_in
sixpack.studio;server1.sixpack.studio:df.graph_title Disk usage in percent
sixpack.studio;server1.sixpack.studio:df.graph_args --upper-limit 100 -l 0
sixpack.studio;server1.sixpack.studio:df.graph_vlabel %
sixpack.studio;server1.sixpack.studio:df.graph_scale no
sixpack.studio;server1.sixpack.studio:df.graph_category disk
sixpack.studio;server1.sixpack.studio:df.graph_order _dev_shm _run _sys_fs_cgroup _dev_vda2 _dev_loop0 _run_user_0 _run_user_978
sixpack.studio;server1.sixpack.studio:df._dev_shm.critical 98
sixpack.studio;server1.sixpack.studio:df._dev_shm.label /dev/shm
sixpack.studio;server1.sixpack.studio:df._dev_shm.warning 92
sixpack.studio;server1.sixpack.studio:df._dev_shm.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df._dev_shm.update_rate 300
sixpack.studio;server1.sixpack.studio:df._sys_fs_cgroup.update_rate 300
sixpack.studio;server1.sixpack.studio:df._sys_fs_cgroup.critical 98
sixpack.studio;server1.sixpack.studio:df._sys_fs_cgroup.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df._sys_fs_cgroup.label /sys/fs/cgroup
sixpack.studio;server1.sixpack.studio:df._sys_fs_cgroup.warning 92
sixpack.studio;server1.sixpack.studio:df._run_user_0.update_rate 300
sixpack.studio;server1.sixpack.studio:df._run_user_0.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df._run_user_0.warning 92
sixpack.studio;server1.sixpack.studio:df._run_user_0.label /run/user/0
sixpack.studio;server1.sixpack.studio:df._run_user_0.critical 98
sixpack.studio;server1.sixpack.studio:df._run.update_rate 300
sixpack.studio;server1.sixpack.studio:df._run.label /run
sixpack.studio;server1.sixpack.studio:df._run.warning 92
sixpack.studio;server1.sixpack.studio:df._run.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df._run.critical 98
sixpack.studio;server1.sixpack.studio:df._dev_loop0.critical 98
sixpack.studio;server1.sixpack.studio:df._dev_loop0.label /tmp
sixpack.studio;server1.sixpack.studio:df._dev_loop0.warning 92
sixpack.studio;server1.sixpack.studio:df._dev_loop0.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df._dev_loop0.update_rate 300
sixpack.studio;server1.sixpack.studio:df._run_user_978.graph_data_size normal
sixpack.studio;server1.sixpack.studio:df._run_user_978.warning 92
sixpack.studio;server1.sixpack.studio:df._run_user_978.label /run/user/978
sixpack.studio;server1.sixpack.studio:df._run_user_978.critical 98
sixpack.studio;server1.sixpack.studio:df._run_user_978.update_rate 300
sixpack.studio;server1.sixpack.studio:df._dev_vda2.update_rate 300
sixpack.studio;server1.sixpack.studio:df._dev_vda2.critical 98
sixpack.studio;server1.sixpack.studio:df._dev_vda2.label /
sixpack.studio;server1.sixpack.studio:df._dev_vda2.warning 92
sixpack.studio;server1.sixpack.studio:df._dev_vda2.graph_data_size normal
sixpack.studio;server1.sixpack.studio:if_err_eth0.graph_order rcvd trans rcvd trans rxdrop txdrop collisions
sixpack.studio;server1.sixpack.studio:if_err_eth0.graph_title eth0 errors
sixpack.studio;server1.sixpack.studio:if_err_eth0.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:if_err_eth0.graph_vlabel packets in (-) / out (+) per ${graph_period}
sixpack.studio;server1.sixpack.studio:if_err_eth0.graph_category network
sixpack.studio;server1.sixpack.studio:if_err_eth0.graph_info This graph shows the amount of errors, packet drops, and collisions on the eth0 network interface.
sixpack.studio;server1.sixpack.studio:if_err_eth0.collisions.update_rate 300
sixpack.studio;server1.sixpack.studio:if_err_eth0.collisions.type COUNTER
sixpack.studio;server1.sixpack.studio:if_err_eth0.collisions.label collisions
sixpack.studio;server1.sixpack.studio:if_err_eth0.collisions.graph_data_size normal
sixpack.studio;server1.sixpack.studio:if_err_eth0.txdrop.update_rate 300
sixpack.studio;server1.sixpack.studio:if_err_eth0.txdrop.type COUNTER
sixpack.studio;server1.sixpack.studio:if_err_eth0.txdrop.graph_data_size normal
sixpack.studio;server1.sixpack.studio:if_err_eth0.txdrop.label drops
sixpack.studio;server1.sixpack.studio:if_err_eth0.txdrop.negative rxdrop
sixpack.studio;server1.sixpack.studio:if_err_eth0.rcvd.update_rate 300
sixpack.studio;server1.sixpack.studio:if_err_eth0.rcvd.graph no
sixpack.studio;server1.sixpack.studio:if_err_eth0.rcvd.type COUNTER
sixpack.studio;server1.sixpack.studio:if_err_eth0.rcvd.warning 1
sixpack.studio;server1.sixpack.studio:if_err_eth0.rcvd.label errors
sixpack.studio;server1.sixpack.studio:if_err_eth0.rcvd.graph_data_size normal
sixpack.studio;server1.sixpack.studio:if_err_eth0.trans.graph_data_size normal
sixpack.studio;server1.sixpack.studio:if_err_eth0.trans.warning 1
sixpack.studio;server1.sixpack.studio:if_err_eth0.trans.label errors
sixpack.studio;server1.sixpack.studio:if_err_eth0.trans.negative rcvd
sixpack.studio;server1.sixpack.studio:if_err_eth0.trans.update_rate 300
sixpack.studio;server1.sixpack.studio:if_err_eth0.trans.type COUNTER
sixpack.studio;server1.sixpack.studio:if_err_eth0.rxdrop.graph no
sixpack.studio;server1.sixpack.studio:if_err_eth0.rxdrop.update_rate 300
sixpack.studio;server1.sixpack.studio:if_err_eth0.rxdrop.type COUNTER
sixpack.studio;server1.sixpack.studio:if_err_eth0.rxdrop.graph_data_size normal
sixpack.studio;server1.sixpack.studio:if_err_eth0.rxdrop.label drops
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.graph_title IOs for /dev/loop0
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.graph_vlabel Units read (-) / write (+)
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.graph_info This graph shows the number of IO operations pr second and the average size of these requests. Lots of small requests should result in in lower throughput (separate graph) and higher service time (separate graph). Please note that starting with munin-node 2.0 the divisor for K is 1000 instead of 1024 which it was prior to 2.0 beta 3. This is because the base for this graph is 1000 not 1024.
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.graph_order rdio wrio avgrdrqsz avgwrrqsz
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgrdrqsz.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgrdrqsz.label dummy
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgrdrqsz.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgrdrqsz.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgrdrqsz.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgrdrqsz.graph no
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgrdrqsz.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.wrio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.wrio.negative rdio
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.wrio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.wrio.label IO/sec
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.wrio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.wrio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.wrio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.rdio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.rdio.label dummy
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.rdio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.rdio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.rdio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.rdio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.rdio.graph no
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgwrrqsz.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgwrrqsz.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgwrrqsz.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgwrrqsz.info Average Request Size in kilobytes (1000 based)
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgwrrqsz.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgwrrqsz.negative avgrdrqsz
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgwrrqsz.label Req Size (KB)
sixpack.studio;server1.sixpack.studio:diskstats_iops.loop0.avgwrrqsz.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.graph_title IOs for /dev/sr0
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.graph_vlabel Units read (-) / write (+)
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.graph_info This graph shows the number of IO operations pr second and the average size of these requests. Lots of small requests should result in in lower throughput (separate graph) and higher service time (separate graph). Please note that starting with munin-node 2.0 the divisor for K is 1000 instead of 1024 which it was prior to 2.0 beta 3. This is because the base for this graph is 1000 not 1024.
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.graph_order rdio wrio avgrdrqsz avgwrrqsz
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.wrio.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.wrio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.wrio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.wrio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.wrio.negative rdio
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.wrio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.wrio.label IO/sec
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgrdrqsz.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgrdrqsz.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgrdrqsz.label dummy
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgrdrqsz.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgrdrqsz.graph no
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgrdrqsz.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgrdrqsz.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgwrrqsz.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgwrrqsz.label Req Size (KB)
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgwrrqsz.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgwrrqsz.negative avgrdrqsz
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgwrrqsz.info Average Request Size in kilobytes (1000 based)
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgwrrqsz.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgwrrqsz.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.avgwrrqsz.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.rdio.label dummy
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.rdio.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.rdio.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.rdio.min 0
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.rdio.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.rdio.graph no
sixpack.studio;server1.sixpack.studio:diskstats_iops.sr0.rdio.type GAUGE
sixpack.studio;server1.sixpack.studio:mysql_innodb.graph_title MySQL InnoDB free tablespace
sixpack.studio;server1.sixpack.studio:mysql_innodb.graph_args --base 1024
sixpack.studio;server1.sixpack.studio:mysql_innodb.graph_vlabel Bytes
sixpack.studio;server1.sixpack.studio:mysql_innodb.graph_category mysql
sixpack.studio;server1.sixpack.studio:mysql_innodb.graph_info Free bytes in the InnoDB tablespace
sixpack.studio;server1.sixpack.studio:mysql_innodb.graph_order free
sixpack.studio;server1.sixpack.studio:mysql_innodb.free.type GAUGE
sixpack.studio;server1.sixpack.studio:mysql_innodb.free.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_innodb.free.min 0
sixpack.studio;server1.sixpack.studio:mysql_innodb.free.critical 1073741824:
sixpack.studio;server1.sixpack.studio:mysql_innodb.free.warning 2147483648:
sixpack.studio;server1.sixpack.studio:mysql_innodb.free.label Bytes free
sixpack.studio;server1.sixpack.studio:mysql_innodb.free.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.graph_title Disk latency per device
sixpack.studio;server1.sixpack.studio:diskstats_latency.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:diskstats_latency.graph_vlabel Average IO Wait (seconds)
sixpack.studio;server1.sixpack.studio:diskstats_latency.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_latency.graph_width 400
sixpack.studio;server1.sixpack.studio:diskstats_latency.graph_order loop0_avgwait sr0_avgwait vda_avgwait
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0_avgwait.label sr0
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0_avgwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0_avgwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0_avgwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0_avgwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0_avgwait.info Average wait time for an I/O request
sixpack.studio;server1.sixpack.studio:diskstats_latency.sr0_avgwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0_avgwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0_avgwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0_avgwait.info Average wait time for an I/O request
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0_avgwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0_avgwait.label loop0
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0_avgwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.loop0_avgwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda_avgwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda_avgwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda_avgwait.info Average wait time for an I/O request
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda_avgwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda_avgwait.label vda
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda_avgwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda_avgwait.draw LINE1
sixpack.studio;server1.sixpack.studio:mysql_threads.graph_title MySQL threads
sixpack.studio;server1.sixpack.studio:mysql_threads.graph_vlabel threads
sixpack.studio;server1.sixpack.studio:mysql_threads.graph_category mysql
sixpack.studio;server1.sixpack.studio:mysql_threads.graph_info Note that this is a old plugin which is no longer installed by default. It is retained for compatability with old installations.
sixpack.studio;server1.sixpack.studio:mysql_threads.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:mysql_threads.graph_order threads
sixpack.studio;server1.sixpack.studio:mysql_threads.threads.update_rate 300
sixpack.studio;server1.sixpack.studio:mysql_threads.threads.label mysql threads
sixpack.studio;server1.sixpack.studio:mysql_threads.threads.graph_data_size normal
sixpack.studio;server1.sixpack.studio:apache_accesses.graph_title Apache accesses
sixpack.studio;server1.sixpack.studio:apache_accesses.graph_args --base 1000
sixpack.studio;server1.sixpack.studio:apache_accesses.graph_vlabel accesses / ${graph_period}
sixpack.studio;server1.sixpack.studio:apache_accesses.graph_category apache
sixpack.studio;server1.sixpack.studio:apache_accesses.graph_order accesses80
sixpack.studio;server1.sixpack.studio:apache_accesses.accesses80.min 0
sixpack.studio;server1.sixpack.studio:apache_accesses.accesses80.type DERIVE
sixpack.studio;server1.sixpack.studio:apache_accesses.accesses80.info The number of accesses (pages and other items served) globally on the Apache server
sixpack.studio;server1.sixpack.studio:apache_accesses.accesses80.update_rate 300
sixpack.studio;server1.sixpack.studio:apache_accesses.accesses80.graph_data_size normal
sixpack.studio;server1.sixpack.studio:apache_accesses.accesses80.max 1000000
sixpack.studio;server1.sixpack.studio:apache_accesses.accesses80.label port 80
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.graph_title Average latency for /dev/vda
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.graph_args --base 1000 --logarithmic
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.graph_vlabel seconds
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.graph_category disk
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.graph_info This graph shows average waiting time/latency for different categories of disk operations. The times that include the queue times indicate how busy your system is. If the waiting time hits 1 second then your I/O system is 100% busy.
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.graph_order svctm avgwait avgrdwait avgwrwait
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.svctm.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.svctm.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.svctm.info Average time an I/O takes on the block device not including any queue times, just the round trip time for the disk request.
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.svctm.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.svctm.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.svctm.label Device IO time
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.svctm.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwait.info Average wait time for an I/O from request start to finish (includes queue times et al)
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwait.label IO Wait time
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwrwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwrwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwrwait.update_rate 300
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwrwait.info Average wait time for a write I/O from request start to finish (includes queue times et al)
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwrwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwrwait.warning 0:3
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwrwait.label Write IO Wait time
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgwrwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgrdwait.draw LINE1
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgrdwait.graph_data_size normal
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgrdwait.warning 0:3
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgrdwait.label Read IO Wait time
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgrdwait.min 0
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgrdwait.type GAUGE
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgrdwait.info Average wait time for a read I/O from request start to finish (includes queue times et al)
sixpack.studio;server1.sixpack.studio:diskstats_latency.vda.avgrdwait.update_rate 300