Additional scripts for the Zabbix agent on Linux to discover and monitor several services
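
The Zabbix 5.0 template export below (Template_App_BackupPC) polls custom agent item keys (backuppc.general, backuppc.entity.discovery[], backuppc.entity[<entity>], backuppc.host.discovery[] and backuppc.host[<host>,all]), each of which must return JSON. A minimal sketch of the agent-side wiring these keys imply, assuming a hypothetical helper script; the real script name, path and arguments come from this repository and may differ:

    # /etc/zabbix/zabbix_agentd.d/backuppc.conf (sketch only: script name and paths are placeholders)
    UserParameter=backuppc.general,/usr/local/bin/backuppc_check general
    UserParameter=backuppc.entity.discovery[*],/usr/local/bin/backuppc_check discover-entities
    UserParameter=backuppc.entity[*],/usr/local/bin/backuppc_check entity "$1"
    UserParameter=backuppc.host.discovery[*],/usr/local/bin/backuppc_check discover-hosts
    UserParameter=backuppc.host[*],/usr/local/bin/backuppc_check host "$1" "$2"

After adding such a file, restart the Zabbix agent and import the XML below in the Zabbix frontend.
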
<?xml version="1.0" encoding="UTF-8"?>
<zabbix_export>
<version>5.0</version>
<date>2021-09-22T16:02:43Z</date>
<groups>
<group>
<name>Templates</name>
</group>
</groups>
<templates>
<template>
<template>Template_App_BackupPC</template>
<name>Template_App_BackupPC</name>
<groups>
<group>
<name>Templates</name>
</group>
</groups>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<items>
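<!--
The backuppc.general master item stores a JSON blob returned by the agent (history and
trends are disabled on it). The dependent items that follow each extract one field from
that blob with a JSONPATH preprocessing step, so the agent is queried only once per
interval. Units prefixed with "!" (for example "!backup(s)") are shown as-is, without
unit prefixes.
-->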
<item>
<name>BackupPC: General info</name>
<key>backuppc.general</key>
<delay>15m</delay>
<history>0</history>
<trends>0</trends>
<value_type>TEXT</value_type>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
</item>
<item>
<name>BackupPC: Total number of backups</name>
<type>DEPENDENT</type>
<key>backuppc.general[bkp]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>!backup(s)</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.bkp</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.general</key>
</master_item>
</item>
<item>
<name>BackupPC: Total full size</name>
<type>DEPENDENT</type>
<key>backuppc.general[full_size]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.full_size</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.general</key>
</master_item>
</item>
<item>
<name>BackupPC: Total history size</name>
<type>DEPENDENT</type>
<key>backuppc.general[history_size]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.history_size</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.general</key>
</master_item>
</item>
<item>
<name>BackupPC: Total number of hosts</name>
<type>DEPENDENT</type>
<key>backuppc.general[hosts]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>!host(s)</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.hosts</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.general</key>
</master_item>
</item>
<item>
<name>BackupPC: Global perf score</name>
<type>DEPENDENT</type>
<key>backuppc.general[perf]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<value_type>FLOAT</value_type>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.perf</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.general</key>
</master_item>
</item>
<item>
<name>BackupPC: Global compression ratio</name>
<type>DEPENDENT</type>
<key>backuppc.general[ratio]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<value_type>FLOAT</value_type>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.ratio</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.general</key>
</master_item>
</item>
<item>
<name>BackupPC: Total size</name>
<type>DEPENDENT</type>
<key>backuppc.general[total_size]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.total_size</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.general</key>
</master_item>
</item>
<item>
<name>Number of BackupPC processes</name>
<key>proc.num[,backuppc,,BackupPC]</key>
<delay>30m</delay>
<history>60d</history>
<trends>0</trends>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<request_method>POST</request_method>
</item>
<item>
<name>Number of raidsync processes</name>
<key>proc.num[,root,,BackupPC_raidsync]</key>
<delay>10m</delay>
<history>60d</history>
<trends>0</trends>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<request_method>POST</request_method>
<triggers>
<trigger>
<expression>{Template_App_BackupPC:proc.num[,root,,BackupPC_raidsync].sum({$EXT_BACKUPS})}&lt;1 and {$EXT_BACKUPS}&gt;0</expression>
<name>No recent copy on external storage</name>
<priority>WARNING</priority>
</trigger>
</triggers>
</item>
</items>
<discovery_rules>
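<!--
Entity discovery: backuppc.entity.discovery[] is expected to return LLD JSON exposing a
{#BPC_ENTITY} macro per BackupPC entity. For each discovered entity one master item,
backuppc.entity[{#BPC_ENTITY}], fetches a JSON summary and the remaining prototypes are
dependent items extracting bkp, full_size, history_size, hosts, perf, ratio and size
from it.
-->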
<discovery_rule>
<name>BackupPC: Entity discovery</name>
<key>backuppc.entity.discovery[]</key>
<delay>2h</delay>
<item_prototypes>
<item_prototype>
<name>BackupPC: Number of backups for {#BPC_ENTITY}</name>
<type>DEPENDENT</type>
<key>backuppc.entity[{#BPC_ENTITY},bkp]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.bkp</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.entity[{#BPC_ENTITY}]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: Sum of last full sizes for {#BPC_ENTITY}</name>
<type>DEPENDENT</type>
<key>backuppc.entity[{#BPC_ENTITY},full_size]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.full_size</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.entity[{#BPC_ENTITY}]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: Sum of history sizes for {#BPC_ENTITY}</name>
<type>DEPENDENT</type>
<key>backuppc.entity[{#BPC_ENTITY},history_size]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.history_size</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.entity[{#BPC_ENTITY}]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: Number of backed up hosts for {#BPC_ENTITY}</name>
<type>DEPENDENT</type>
<key>backuppc.entity[{#BPC_ENTITY},hosts]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.hosts</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.entity[{#BPC_ENTITY}]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: Performance indicator for {#BPC_ENTITY}</name>
<type>DEPENDENT</type>
<key>backuppc.entity[{#BPC_ENTITY},perf]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<value_type>FLOAT</value_type>
<units>!h/d</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.perf</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.entity[{#BPC_ENTITY}]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: Compression ratio for {#BPC_ENTITY}</name>
<type>DEPENDENT</type>
<key>backuppc.entity[{#BPC_ENTITY},ratio]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<value_type>FLOAT</value_type>
<units>%</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.ratio</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.entity[{#BPC_ENTITY}]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: Total backups size for {#BPC_ENTITY}</name>
<type>DEPENDENT</type>
<key>backuppc.entity[{#BPC_ENTITY},size]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.size</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.entity[{#BPC_ENTITY}]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: Info for entity {#BPC_ENTITY}</name>
<key>backuppc.entity[{#BPC_ENTITY}]</key>
<delay>1h</delay>
<history>0</history>
<trends>0</trends>
<value_type>TEXT</value_type>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
</item_prototype>
</item_prototypes>
<graph_prototypes>
<graph_prototype>
<name>BackupPC: Entity {#BPC_ENTITY}: Compression ratio and perf</name>
<ymin_type_1>FIXED</ymin_type_1>
<graph_items>
<graph_item>
<drawtype>GRADIENT_LINE</drawtype>
<color>43A047</color>
<item>
<host>Template_App_BackupPC</host>
<key>backuppc.entity[{#BPC_ENTITY},size]</key>
</item>
</graph_item>
<graph_item>
<sortorder>1</sortorder>
<drawtype>BOLD_LINE</drawtype>
<color>FF0000</color>
<yaxisside>RIGHT</yaxisside>
<item>
<host>Template_App_BackupPC</host>
<key>backuppc.entity[{#BPC_ENTITY},ratio]</key>
</item>
</graph_item>
<graph_item>
<sortorder>2</sortorder>
<drawtype>DASHED_LINE</drawtype>
<color>0040FF</color>
<yaxisside>RIGHT</yaxisside>
<item>
<host>Template_App_BackupPC</host>
<key>backuppc.entity[{#BPC_ENTITY},perf]</key>
</item>
</graph_item>
</graph_items>
</graph_prototype>
<graph_prototype>
<name>BackupPC: Entity {#BPC_ENTITY}: Full / History sizes</name>
<type>STACKED</type>
<graph_items>
<graph_item>
<color>4000FF</color>
<item>
<host>Template_App_BackupPC</host>
<key>backuppc.entity[{#BPC_ENTITY},full_size]</key>
</item>
</graph_item>
<graph_item>
<sortorder>1</sortorder>
<color>4DD0E1</color>
<item>
<host>Template_App_BackupPC</host>
<key>backuppc.entity[{#BPC_ENTITY},history_size]</key>
</item>
</graph_item>
</graph_items>
</graph_prototype>
</graph_prototypes>
</discovery_rule>
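<!--
Host discovery: backuppc.host.discovery[] is expected to return LLD JSON exposing a
{#BPCHOST} macro per backed-up host, together with per-host thresholds used by the
trigger prototypes below ({#BPCNOBACKUPWARNING}, {#BPCMAXERROR}, {#BPC_TOO_BIG_FACTOR},
{#BPC_TOO_SMALL_FACTOR}), presumably emitted by the discovery script.
-->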
<discovery_rule>
<name>BackupPC: Hosts discovery</name>
<key>backuppc.host.discovery[]</key>
<delay>2h;50s/1-7,00:00-24:00</delay>
<filter>
<evaltype>OR</evaltype>
</filter>
<lifetime>15d</lifetime>
<item_prototypes>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Last backup age</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},age]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>s</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.age</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: Info for {#BPCHOST}</name>
<key>backuppc.host[{#BPCHOST},all]</key>
<delay>30m</delay>
<history>0</history>
<trends>0</trends>
<value_type>TEXT</value_type>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Number of backups</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},bkp]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>!backups</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.bkp</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Compression ratio of last backup</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},comp_ratio]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<value_type>FLOAT</value_type>
<units>%</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.comp_ratio</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Last backup duration</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},duration]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>s</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.duration</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Backup status</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},enabled]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.enabled</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Number of errors on last backup</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},errors]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>!errors</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.errors</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Last full backup size</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},full_size]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.full_size</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: History size</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},history_size]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.history_size</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: New file size in last backup</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},new_size]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.new_size</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Average of new file sizes</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},new_size_avg]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.new_size_avg</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Median of new file sizes</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},new_size_median]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.new_size_median</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: First quartile of new file sizes</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},new_size_q1]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.new_size_q1</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Third quartile of new file sizes</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},new_size_q3]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.new_size_q3</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
<item_prototype>
<name>BackupPC: {#BPCHOST}: Total backups size</name>
<type>DEPENDENT</type>
<key>backuppc.host[{#BPCHOST},total_size]</key>
<delay>0</delay>
<history>60d</history>
<trends>1095d</trends>
<units>B</units>
<applications>
<application>
<name>BackupPC</name>
</application>
</applications>
<preprocessing>
<step>
<type>JSONPATH</type>
<params>$.total_size</params>
</step>
</preprocessing>
<master_item>
<key>backuppc.host[{#BPCHOST},all]</key>
</master_item>
</item_prototype>
</item_prototypes>
<trigger_prototypes>
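<!--
The "suspiciously big/small" prototypes treat the latest new_size as an outlier when it
falls outside the interquartile fences (above Q3 + 1.5*IQR or below Q1 - 1.5*IQR, with
IQR = Q3 - Q1) and it also differs from the average by at least the {#BPC_TOO_BIG_FACTOR}
or {#BPC_TOO_SMALL_FACTOR} factor, and only once more than 4 backups exist so the
quartiles are meaningful. The dayofweek()/time() conditions keep these alerts quiet
around the weekend and, early in the week, before {$BPC_SIZE_WARN_FROM_HOUR}.
-->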
<trigger_prototype>
<expression>{Template_App_BackupPC:backuppc.host[{#BPCHOST},age].last(0)}&gt;{#BPCNOBACKUPWARNING}*24*3600 and {Template_App_BackupPC:backuppc.host[{#BPCHOST},enabled].last()}=1</expression>
<name>No backup for {#BPCHOST} since {ITEM.LASTVALUE1}</name>
<priority>WARNING</priority>
</trigger_prototype>
<trigger_prototype>
<expression>{Template_App_BackupPC:backuppc.host[{#BPCHOST},enabled].last()}=1 and {Template_App_BackupPC:backuppc.host[{#BPCHOST},bkp].last()}&gt;4 and {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].last()} &gt; ({Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q3].last()} + ({Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q3].last()} - {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q1].last()}) * 1.5) and {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].last()} &gt; {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_avg].last()}*{#BPC_TOO_BIG_FACTOR} and {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].dayofweek()} &lt;&gt; 7 and {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].dayofweek()} &lt;&gt; 1 and ({Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].dayofweek()} &lt;&gt; 2 or {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].time()} &gt; {$BPC_SIZE_WARN_FROM_HOUR})</expression>
<recovery_mode>RECOVERY_EXPRESSION</recovery_mode>
<recovery_expression>{Template_App_BackupPC:backuppc.host[{#BPCHOST},enabled].last()}=0 or {Template_App_BackupPC:backuppc.host[{#BPCHOST},bkp].last()}&lt;=4 or {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].last()} &lt;= ({Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q3].last()} + ({Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q3].last()} - {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q1].last()}) * 1.5) or {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].last()} &lt;= {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_avg].last()}*{#BPC_TOO_BIG_FACTOR}</recovery_expression>
<name>Suspiciously big ({ITEM.VALUE3}) backup for {#BPCHOST}</name>
<priority>INFO</priority>
</trigger_prototype>
<trigger_prototype>
<expression>{#BPC_TOO_SMALL_FACTOR}&gt;0 and {Template_App_BackupPC:backuppc.host[{#BPCHOST},enabled].last()}=1 and {Template_App_BackupPC:backuppc.host[{#BPCHOST},bkp].last()}&gt;4 and {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].last()} &lt; ({Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q1].last()} - ({Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q3].last()} - {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q1].last()}) * 1.5) and {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].last()} &lt; {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_avg].last()}/{#BPC_TOO_SMALL_FACTOR} and {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].dayofweek()} &lt;&gt; 6 and {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].dayofweek()} &lt;&gt; 7 and {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].dayofweek()} &lt;&gt; 1 and ({Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].dayofweek()} &lt;&gt; 2 or {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].time()} &gt; {$BPC_SIZE_WARN_FROM_HOUR})</expression>
<recovery_mode>RECOVERY_EXPRESSION</recovery_mode>
<recovery_expression>{#BPC_TOO_SMALL_FACTOR}&lt;0 or {Template_App_BackupPC:backuppc.host[{#BPCHOST},enabled].last()}=0 or {Template_App_BackupPC:backuppc.host[{#BPCHOST},bkp].last()}&lt;=4 or {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].last()} &gt;= ({Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q1].last()} - ({Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q3].last()} - {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_q1].last()}) * 1.5) or {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size].last()} &gt;= {Template_App_BackupPC:backuppc.host[{#BPCHOST},new_size_avg].last()}/{#BPC_TOO_SMALL_FACTOR}</recovery_expression>
<name>Suspiciously small ({ITEM.VALUE3}) backup for {#BPCHOST}</name>
<priority>WARNING</priority>
</trigger_prototype>
<trigger_prototype>
<expression>{Template_App_BackupPC:backuppc.host[{#BPCHOST},errors].last(0)}&gt;{#BPCMAXERROR} and {Template_App_BackupPC:backuppc.host[{#BPCHOST},enabled].last()}=1</expression>
<name>{ITEM.LASTVALUE1} (xfer) in last backup of {#BPCHOST}</name>
<priority>WARNING</priority>
</trigger_prototype>
</trigger_prototypes>
<graph_prototypes>
<graph_prototype>
<name>BackupPC: Host {#BPCHOST}: Compression ratio and duration</name>
<ymin_type_1>FIXED</ymin_type_1>
<graph_items>
<graph_item>
<drawtype>GRADIENT_LINE</drawtype>
<color>43A047</color>
<item>
<host>Template_App_BackupPC</host>
<key>backuppc.host[{#BPCHOST},total_size]</key>
</item>
</graph_item>
<graph_item>
<sortorder>1</sortorder>
<drawtype>BOLD_LINE</drawtype>
<color>FF0000</color>
<yaxisside>RIGHT</yaxisside>
<item>
<host>Template_App_BackupPC</host>
<key>backuppc.host[{#BPCHOST},comp_ratio]</key>
</item>
</graph_item>
<graph_item>
<sortorder>2</sortorder>
<color>7E57C2</color>
<yaxisside>RIGHT</yaxisside>
<item>
<host>Template_App_BackupPC</host>
<key>backuppc.host[{#BPCHOST},duration]</key>
</item>
</graph_item>
</graph_items>
</graph_prototype>
<graph_prototype>
<name>BackupPC: Host {#BPCHOST}: Full / History sizes</name>
<type>STACKED</type>
<graph_items>
<graph_item>
<color>4000FF</color>
<item>
<host>Template_App_BackupPC</host>
<key>backuppc.host[{#BPCHOST},full_size]</key>
</item>
</graph_item>
<graph_item>
<sortorder>1</sortorder>
<color>4DD0E1</color>
<item>
<host>Template_App_BackupPC</host>
<key>backuppc.host[{#BPCHOST},history_size]</key>
</item>
</graph_item>
</graph_items>
</graph_prototype>
</graph_prototypes>
<request_method>POST</request_method>
</discovery_rule>
</discovery_rules>
<macros>
<macro>
<macro>{$BPC_SIZE_WARN_FROM_HOUR}</macro>
<value>080000</value>
<description>Size alerts are only raised after this time of day (HHMMSS)</description>
</macro>
<macro>
<macro>{$EXT_BACKUPS}</macro>
<value>0</value>
<description>If set to a positive duration, an alert can be raised when no raidsync backup has been made recently</description>
</macro>
</macros>
</template>
</templates>
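<!--
Template-level trigger: BackupPC is considered down when neither a BackupPC process
(user backuppc) nor a BackupPC_raidsync process (user root) has been seen in the last
two samples of the corresponding proc.num items.
-->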
<triggers>
<trigger>
<expression>{Template_App_BackupPC:proc.num[,backuppc,,BackupPC].sum(#2)}=0 and {Template_App_BackupPC:proc.num[,root,,BackupPC_raidsync].sum(#2)}=0</expression>
<name>BackupPC isn't running</name>
<priority>AVERAGE</priority>
</trigger>
</triggers>
</zabbix_export>