Skip to content

Commit 72b7b82

Browse files
committed
Merge pull request #60 from richm/es-copy
Allow fluentd to send copies of logs to another Elasticsearch
2 parents 39dfa0e + 7ec7a0e commit 72b7b82

File tree

11 files changed

+462
-37
lines changed

11 files changed

+462
-37
lines changed

README.md

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -223,6 +223,42 @@ Mount the created secret to your Fluentd container
223223
oc volumes dc/logging-fluentd --add --type=secret --secret-name=fluentd-throttle --mount-path=/etc/throttle-settings --name=throttle-settings --overwrite
224224
```
225225

226+
## Have Fluentd send logs to another Elasticsearch
227+
228+
You can configure Fluentd to send a copy of each log message to both the
229+
Elasticsearch instance included with OpenShift aggregated logging, _and_ to an
230+
external Elasticsearch instance. For example, if you already have an
231+
Elasticsearch instance set up for auditing purposes, or data warehousing, you
232+
can send a copy of each log message to that Elasticsearch, in addition to the
233+
Elasticsearch hosted with OpenShift aggregated logging.
234+
235+
If the environment variable `ES_COPY` is `"true"`, Fluentd will send a copy of
236+
the logs to another Elasticsearch. The settings for the copy are just like the
237+
current `ES_HOST`, etc. and `OPS_HOST`, etc. settings, except that they add
238+
`_COPY`: `ES_COPY_HOST`, `OPS_COPY_HOST`, etc. There are some additional
239+
parameters added:
240+
* `ES_COPY_SCHEME`, `OPS_COPY_SCHEME` - can use either http or https - defaults
241+
to https
242+
* `ES_COPY_USERNAME`, `OPS_COPY_USERNAME` - user name to use to authenticate to
243+
elasticsearch using username/password auth
244+
* `ES_COPY_PASSWORD`, `OPS_COPY_PASSWORD` - password to use to authenticate to
245+
elasticsearch using username/password auth
246+
247+
To set the parameters:
248+
249+
oc edit -n logging template logging-fluentd-template
250+
# add/edit ES_COPY to have the value "true" - with the quotes
251+
# add or edit the COPY parameters listed above
252+
# automated:
253+
# oc get -n logging template logging-fluentd-template -o yaml > file
254+
# edit the file with sed/perl/whatever
255+
# oc replace -n logging -f file
256+
oc delete daemonset logging-fluentd
257+
# wait for fluentd to stop
258+
oc process -n logging logging-fluentd-template | \
259+
oc create -n logging -f -
260+
# this creates the daemonset and starts fluentd with the new params
261+
226262
## Upgrading your EFK stack
227263

228264
If you need to upgrade your EFK stack with new images, you'll need to take the

deployment/templates/fluentd.yaml

Lines changed: 104 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,40 @@ objects:
8686
value: ${OPS_CLIENT_KEY}
8787
- name: "OPS_CA"
8888
value: ${OPS_CA}
89+
- name: "ES_COPY"
90+
value: ${ES_COPY}
91+
- name: "ES_COPY_HOST"
92+
value: ${ES_COPY_HOST}
93+
- name: "ES_COPY_PORT"
94+
value: ${ES_COPY_PORT}
95+
- name: "ES_COPY_SCHEME"
96+
value: ${ES_COPY_SCHEME}
97+
- name: "ES_COPY_CLIENT_CERT"
98+
value: ${ES_COPY_CLIENT_CERT}
99+
- name: "ES_COPY_CLIENT_KEY"
100+
value: ${ES_COPY_CLIENT_KEY}
101+
- name: "ES_COPY_CA"
102+
value: ${ES_COPY_CA}
103+
- name: "ES_COPY_USERNAME"
104+
value: ${ES_COPY_USERNAME}
105+
- name: "ES_COPY_PASSWORD"
106+
value: ${ES_COPY_PASSWORD}
107+
- name: "OPS_COPY_HOST"
108+
value: ${OPS_COPY_HOST}
109+
- name: "OPS_COPY_PORT"
110+
value: ${OPS_COPY_PORT}
111+
- name: "OPS_COPY_SCHEME"
112+
value: ${OPS_COPY_SCHEME}
113+
- name: "OPS_COPY_CLIENT_CERT"
114+
value: ${OPS_COPY_CLIENT_CERT}
115+
- name: "OPS_COPY_CLIENT_KEY"
116+
value: ${OPS_COPY_CLIENT_KEY}
117+
- name: "OPS_COPY_CA"
118+
value: ${OPS_COPY_CA}
119+
- name: "OPS_COPY_USERNAME"
120+
value: ${OPS_COPY_USERNAME}
121+
- name: "OPS_COPY_PASSWORD"
122+
value: ${OPS_COPY_PASSWORD}
89123
volumes:
90124
- name: varlog
91125
hostPath:
@@ -121,7 +155,7 @@ parameters:
121155
name: ES_CLIENT_KEY
122156
value: "/etc/fluent/keys/key"
123157
-
124-
description: "Location of CA cert for validating connectiong to ElasticSearch to write logs"
158+
description: "Location of CA cert for validating connection to ElasticSearch to write logs"
125159
name: ES_CA
126160
value: "/etc/fluent/keys/ca"
127161
-
@@ -141,7 +175,7 @@ parameters:
141175
name: OPS_CLIENT_KEY
142176
value: "/etc/fluent/keys/key"
143177
-
144-
description: "Location of CA cert for validating connectiong to ElasticSearch to write cluster logs"
178+
description: "Location of CA cert for validating connection to ElasticSearch to write cluster logs"
145179
name: OPS_CA
146180
value: "/etc/fluent/keys/ca"
147181
-
@@ -152,3 +186,71 @@ parameters:
152186
description: "The image version for the Fluentd image to use"
153187
name: IMAGE_VERSION
154188
value: ""
189+
-
190+
description: "Send a copy of the logs to an additional Elasticsearch instance."
191+
name: ES_COPY
192+
value: "false"
193+
-
194+
description: "Hostname (or IP) for additional ElasticSearch"
195+
name: ES_COPY_HOST
196+
value: ""
197+
-
198+
description: "Port number for additional ElasticSearch"
199+
name: ES_COPY_PORT
200+
value: ""
201+
-
202+
description: "URL scheme for additional ElasticSearch - http or https - default is https"
203+
name: ES_COPY_SCHEME
204+
value: "https"
205+
-
206+
description: "Location of client certificate for authenticating to additional ElasticSearch"
207+
name: ES_COPY_CLIENT_CERT
208+
value: ""
209+
-
210+
description: "Location of client key for authenticating to additional ElasticSearch"
211+
name: ES_COPY_CLIENT_KEY
212+
value: ""
213+
-
214+
description: "Location of CA cert for validating connection to additional ElasticSearch"
215+
name: ES_COPY_CA
216+
value: ""
217+
-
218+
description: "Username for username/password auth to connect to additional ElasticSearch"
219+
name: ES_COPY_USERNAME
220+
value: ""
221+
-
222+
description: "Password for username/password auth to connect to additional ElasticSearch"
223+
name: ES_COPY_PASSWORD
224+
value: ""
225+
-
226+
description: "Hostname (or IP) for additional ElasticSearch to write cluster logs"
227+
name: OPS_COPY_HOST
228+
value: ""
229+
-
230+
description: "Port number for additional ElasticSearch to write cluster logs"
231+
name: OPS_COPY_PORT
232+
value: ""
233+
-
234+
description: "URL scheme for additional ElasticSearch to write cluster logs - http or https - default is https"
235+
name: OPS_COPY_SCHEME
236+
value: "https"
237+
-
238+
description: "Location of client certificate for authenticating to additional ElasticSearch to write cluster logs"
239+
name: OPS_COPY_CLIENT_CERT
240+
value: ""
241+
-
242+
description: "Location of client key for authenticating to additional ElasticSearch to write cluster logs"
243+
name: OPS_COPY_CLIENT_KEY
244+
value: ""
245+
-
246+
description: "Location of CA cert for validating connection to additional ElasticSearch to write cluster logs"
247+
name: OPS_COPY_CA
248+
value: ""
249+
-
250+
description: "Username for username/password auth to connect to additional ElasticSearch to write cluster logs"
251+
name: OPS_COPY_USERNAME
252+
value: ""
253+
-
254+
description: "Password for username/password auth to connect to additional ElasticSearch to write cluster logs"
255+
name: OPS_COPY_PASSWORD
256+
value: ""

fluentd/Dockerfile

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,9 @@ RUN rpmkeys --import file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 && \
2828

2929
ADD fluent.conf /etc/fluent/fluent.conf
3030
ADD configs.d/ /etc/fluent/configs.d/
31-
ADD run.sh generate_throttle_configs.rb ${HOME}/
31+
ADD run.sh generate_throttle_configs.rb \
32+
fluentd_es_copy_config.conf fluentd_es_ops_copy_config.conf \
33+
${HOME}/
3234

3335
WORKDIR ${HOME}
3436

Lines changed: 3 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,5 @@
11
<match **>
2-
@type elasticsearch_dynamic
3-
host "#{ENV['ES_HOST']}"
4-
port "#{ENV['ES_PORT']}"
5-
scheme https
6-
index_name ${record['kubernetes_namespace_name']}.${Time.at(time).getutc.strftime(@logstash_dateformat)}
7-
user fluentd
8-
password changeme
9-
10-
client_key "#{ENV['ES_CLIENT_KEY']}"
11-
client_cert "#{ENV['ES_CLIENT_CERT']}"
12-
ca_file "#{ENV['ES_CA']}"
13-
14-
flush_interval 5s
15-
max_retry_wait 300
16-
disable_retry_limit
2+
@type copy
3+
@include fluentd_es_config.conf
4+
@include fluentd_es_copy_config.conf
175
</match>
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
<store>
2+
@type elasticsearch_dynamic
3+
host "#{ENV['ES_HOST']}"
4+
port "#{ENV['ES_PORT']}"
5+
scheme https
6+
index_name ${record['kubernetes_namespace_name']}.${Time.at(time).getutc.strftime(@logstash_dateformat)}
7+
user fluentd
8+
password changeme
9+
10+
client_key "#{ENV['ES_CLIENT_KEY']}"
11+
client_cert "#{ENV['ES_CLIENT_CERT']}"
12+
ca_file "#{ENV['ES_CA']}"
13+
14+
flush_interval 5s
15+
max_retry_wait 300
16+
disable_retry_limit
17+
</store>
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
<store>
2+
@type elasticsearch_dynamic
3+
host "#{ENV['OPS_HOST']}"
4+
port "#{ENV['OPS_PORT']}"
5+
scheme https
6+
index_name .operations.${record['time'].nil? ? Time.at(time).getutc.strftime(@logstash_dateformat) : Time.parse(record['time']).getutc.strftime(@logstash_dateformat)}
7+
user fluentd
8+
password changeme
9+
10+
client_key "#{ENV['OPS_CLIENT_KEY']}"
11+
client_cert "#{ENV['OPS_CLIENT_CERT']}"
12+
ca_file "#{ENV['OPS_CA']}"
13+
14+
flush_interval 5s
15+
max_retry_wait 300
16+
disable_retry_limit
17+
</store>
Lines changed: 3 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,5 @@
11
<match system.var.log** **_default_** **_openshift_** **_openshift-infra_**>
2-
@type elasticsearch_dynamic
3-
host "#{ENV['OPS_HOST']}"
4-
port "#{ENV['OPS_PORT']}"
5-
scheme https
6-
index_name .operations.${record['time'].nil? ? Time.at(time).getutc.strftime(@logstash_dateformat) : Time.parse(record['time']).getutc.strftime(@logstash_dateformat)}
7-
8-
user fluentd
9-
password changeme
10-
11-
client_key "#{ENV['OPS_CLIENT_KEY']}"
12-
client_cert "#{ENV['OPS_CLIENT_CERT']}"
13-
ca_file "#{ENV['OPS_CA']}"
14-
15-
flush_interval 5s
16-
max_retry_wait 300
17-
disable_retry_limit
2+
@type copy
3+
@include fluentd_es_ops_config.conf
4+
@include fluentd_es_ops_copy_config.conf
185
</match>
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
<store>
2+
@type elasticsearch_dynamic
3+
host "#{ENV['ES_COPY_HOST']}"
4+
port "#{ENV['ES_COPY_PORT']}"
5+
scheme "#{ENV['ES_COPY_SCHEME']}"
6+
index_name ${record['kubernetes_namespace_name']}.${Time.at(time).getutc.strftime(@logstash_dateformat)}
7+
user "#{ENV['ES_COPY_USERNAME']}"
8+
password "#{ENV['ES_COPY_PASSWORD']}"
9+
10+
client_key "#{ENV['ES_COPY_CLIENT_KEY']}"
11+
client_cert "#{ENV['ES_COPY_CLIENT_CERT']}"
12+
ca_file "#{ENV['ES_COPY_CA']}"
13+
14+
flush_interval 5s
15+
max_retry_wait 300
16+
disable_retry_limit
17+
</store>
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
<store>
2+
@type elasticsearch_dynamic
3+
host "#{ENV['OPS_COPY_HOST']}"
4+
port "#{ENV['OPS_COPY_PORT']}"
5+
scheme "#{ENV['OPS_COPY_SCHEME']}"
6+
index_name .operations.${record['time'].nil? ? Time.at(time).getutc.strftime(@logstash_dateformat) : Time.parse(record['time']).getutc.strftime(@logstash_dateformat)}
7+
user "#{ENV['OPS_COPY_USERNAME']}"
8+
password "#{ENV['OPS_COPY_PASSWORD']}"
9+
10+
client_key "#{ENV['OPS_COPY_CLIENT_KEY']}"
11+
client_cert "#{ENV['OPS_COPY_CLIENT_CERT']}"
12+
ca_file "#{ENV['OPS_COPY_CA']}"
13+
14+
flush_interval 5s
15+
max_retry_wait 300
16+
disable_retry_limit
17+
</store>

fluentd/run.sh

Lines changed: 32 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,41 @@
1+
#!/bin/bash
2+
13
if [[ $VERBOSE ]]; then
24
set -ex
5+
fluentdargs="-vv"
36
else
47
set -e
8+
fluentdargs=
59
fi
610

7-
mkdir -p /etc/fluent/configs.d/input/docker
8-
mkdir -p /etc/fluent/configs.d/input/syslog
11+
OPS_COPY_HOST="${OPS_COPY_HOST:-$ES_COPY_HOST}"
12+
OPS_COPY_PORT="${OPS_COPY_PORT:-$ES_COPY_PORT}"
13+
OPS_COPY_SCHEME="${OPS_COPY_SCHEME:-$ES_COPY_SCHEME}"
14+
OPS_COPY_CLIENT_CERT="${OPS_COPY_CLIENT_CERT:-$ES_COPY_CLIENT_CERT}"
15+
OPS_COPY_CLIENT_KEY="${OPS_COPY_CLIENT_KEY:-$ES_COPY_CLIENT_KEY}"
16+
OPS_COPY_CA="${OPS_COPY_CA:-$ES_COPY_CA}"
17+
OPS_COPY_USERNAME="${OPS_COPY_USERNAME:-$ES_COPY_USERNAME}"
18+
OPS_COPY_PASSWORD="${OPS_COPY_PASSWORD:-$ES_COPY_PASSWORD}"
19+
export OPS_COPY_HOST OPS_COPY_PORT OPS_COPY_SCHEME OPS_COPY_CLIENT_CERT \
20+
OPS_COPY_CLIENT_KEY OPS_COPY_CA OPS_COPY_USERNAME OPS_COPY_PASSWORD
21+
22+
CFG_IN_DIR=/etc/fluent/configs.d/input
23+
CFG_OUT_DIR=/etc/fluent/configs.d/output
24+
25+
mkdir -p $CFG_IN_DIR/docker
26+
mkdir -p $CFG_IN_DIR/syslog
927

1028
ruby generate_throttle_configs.rb
1129

12-
fluentd
30+
if [ "$ES_COPY" = "true" ] ; then
31+
# user wants to split the output of fluentd into two different elasticsearch
32+
# user will provide the necessary COPY environment variables as above
33+
cp $HOME/fluentd_es_copy_config.conf $HOME/fluentd_es_ops_copy_config.conf $CFG_OUT_DIR
34+
else
35+
# create empty files for the ES copy config
36+
rm -f $CFG_OUT_DIR/fluentd_es_copy_config.conf $CFG_OUT_DIR/fluentd_es_ops_copy_config.conf
37+
touch $CFG_OUT_DIR/fluentd_es_copy_config.conf $CFG_OUT_DIR/fluentd_es_ops_copy_config.conf
38+
fi
39+
40+
41+
fluentd $fluentdargs

0 commit comments

Comments
 (0)