Commit 07f180a
Author: Remi Hakim
Parent: e07bbbf

Add more metrics to the haproxy integration and add the possibility to collect aggregates only.
Fixes #688 and #689.
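
For context, the new option is read straight off the check's instance configuration. Below is a minimal sketch of such an instance, written as the Python dict that check(instance) receives; the keys mirror the instance.get() calls in the diff, while the URL and credentials are placeholder values:

    # Hypothetical instance dict for the HAProxy check; keys mirror the
    # instance.get() calls in this commit, values are placeholders.
    instance = {
        'url': 'http://localhost/admin?stats',  # HAProxy stats endpoint
        'username': 'admin',
        'password': 'admin',
        # New in this commit: when True, only aggregate (FRONTEND/BACKEND)
        # rows produce metrics; per-host rows are skipped. Defaults to False.
        'collect_aggregates_only': True,
    }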

File tree: 1 file changed

checks.d/haproxy.py: 15 additions, 10 deletions
@@ -42,12 +42,19 @@ def __init__(self, name, init_config, agentConfig):
             "wretr": ("rate", "warnings.retr_rate"),
             "wredis": ("rate", "warnings.redis_rate"),
             "req_rate": ("gauge", "requests.rate"),
+            "hrsp_1xx": ("rate", "response.1xx"),
+            "hrsp_2xx": ("rate", "response.2xx"),
+            "hrsp_3xx": ("rate", "response.3xx"),
+            "hrsp_4xx": ("rate", "response.4xx"),
+            "hrsp_5xx": ("rate", "response.5xx"),
+            "hrsp_other": ("rate", "response.other"),
         }

     def check(self, instance):
         url = instance.get('url')
         username = instance.get('username')
         password = instance.get('password')
+        collect_aggregates_only = instance.get('collect_aggregates_only', False)

         self.log.debug('Processing HAProxy data for %s' % url)

@@ -58,7 +65,7 @@ def check(self, instance):
         else:
             events_cb = None

-        self._process_data(data, self.hostname, self._process_metrics,
+        self._process_data(data, collect_aggregates_only, self._process_metrics,
             events_cb, url)

     def _fetch_data(self, url, username, password):
@@ -80,7 +87,7 @@ def _fetch_data(self, url, username, password):
         # Split the data by line
         return response.split('\n')

-    def _process_data(self, data, my_hostname, metric_cb=None, event_cb=None, url=None):
+    def _process_data(self, data, collect_aggregates_only, metric_cb=None, event_cb=None, url=None):
         ''' Main data-processing loop. For each piece of useful data, we'll
         either save a metric, save an event or both. '''

@@ -103,32 +110,30 @@ def _process_data(self, data, my_hostname, metric_cb=None, event_cb=None, url=None):
                 if val:
                     try:
-                        # Try converting to a long, if failure, just leave it
-                        val = long(val)
-                    except:
+                        # Try converting to a float; on failure, leave the value as-is
+                        val = float(val)
+                    except Exception:
                         pass
                 data_dict[fields[i]] = val

-            # Don't create metrics for aggregates
+            # Aggregate rows (FRONTEND/BACKEND) close out the current service
             service = data_dict['svname']
             if data_dict['svname'] in Services.ALL:
-                if not data_list and service == Services.FRONTEND:
-                    data_list.append(data_dict)
+                data_list.append(data_dict)

                 # Send the list of data to the metric and event callbacks
                 if metric_cb:
-                    metric_cb(data_list, service, my_hostname)
+                    metric_cb(data_list, service)
                 if event_cb:
                     event_cb(data_list, url)

                 # Clear out the event list for the next service
                 data_list = []
-            else:
+            elif not collect_aggregates_only:
                 data_list.append(data_dict)

         return data

-    def _process_metrics(self, data_list, service, my_hostname):
-        hosts_to_aggregate = {}
+    def _process_metrics(self, data_list, service):
         for data in data_list:
             """
             Each element of data_list is a dictionary related to one host