From 9cdeee5c754782a46d1007ffbab6cbb777946c37 Mon Sep 17 00:00:00 2001 From: Michael Burke Date: Thu, 18 Jul 2019 16:03:07 -0400 Subject: [PATCH] Adding rsyslog topics back to 4.2 --- _topic_map.yml | 2 +- logging/config/efk-logging-configuring.adoc | 3 -- logging/config/efk-logging-fluentd.adoc | 10 ++-- logging/config/efk-logging-systemd.adoc | 4 +- logging/efk-logging-eventrouter.adoc | 5 ++ logging/efk-logging.adoc | 2 +- modules/efk-logging-about-components.adoc | 6 +-- modules/efk-logging-about-curator.adoc | 2 +- modules/efk-logging-about-elasticsearch.adoc | 2 +- modules/efk-logging-about-eventrouter.adoc | 7 ++- modules/efk-logging-about-fluentd.adoc | 12 ++--- modules/efk-logging-about-kibana.adoc | 2 +- modules/efk-logging-about.adoc | 2 +- .../efk-logging-configuring-image-about.adoc | 27 ++++------- modules/efk-logging-deploying-about.adoc | 2 - modules/efk-logging-eventrouter-deploy.adoc | 5 ++ .../efk-logging-external-elasticsearch.adoc | 8 ++-- modules/efk-logging-external-syslog.adoc | 9 +++- modules/efk-logging-fluentd-alerts.adoc | 44 +++++++++++------- modules/efk-logging-fluentd-collector.adoc | 4 +- modules/efk-logging-fluentd-envvar.adoc | 9 +++- modules/efk-logging-fluentd-external.adoc | 8 ++++ modules/efk-logging-fluentd-json.adoc | 16 ++++--- modules/efk-logging-fluentd-limits.adoc | 29 ++++++++++-- modules/efk-logging-fluentd-log-location.adoc | 16 ++++--- modules/efk-logging-fluentd-log-rotation.adoc | 46 +++++++++++++------ modules/efk-logging-fluentd-pod-location.adoc | 31 +++++++++---- modules/efk-logging-fluentd-throttling.adoc | 24 ++++++++-- 28 files changed, 219 insertions(+), 118 deletions(-) diff --git a/_topic_map.yml b/_topic_map.yml index b821416da2fc..e8a482a43a07 100644 --- a/_topic_map.yml +++ b/_topic_map.yml @@ -743,7 +743,7 @@ Topics: File: efk-logging-kibana - Name: Configuring Curator File: efk-logging-curator - - Name: Configuring Fluentd + - Name: Configuring the logging collector File: efk-logging-fluentd - Name: 
Configuring systemd-journald File: efk-logging-systemd diff --git a/logging/config/efk-logging-configuring.adoc b/logging/config/efk-logging-configuring.adoc index 0f695cb3258a..f3c5ec19e63f 100644 --- a/logging/config/efk-logging-configuring.adoc +++ b/logging/config/efk-logging-configuring.adoc @@ -69,15 +69,12 @@ environment variable in the `cluster-logging-operator` Deployment. * You can specify specific nodes for the logging components using node selectors. -//// -4.1 * You can specify the Log collectors to deploy to each node in a cluster, either Fluentd or Rsyslog. [IMPORTANT] ==== The Rsyslog log collector is currently a Technology Preview feature. ==== -//// // The following include statements pull in the module files that comprise // the assembly. Include any combination of concept, procedure, or reference diff --git a/logging/config/efk-logging-fluentd.adoc b/logging/config/efk-logging-fluentd.adoc index 62213c6486aa..879e4cc3425d 100644 --- a/logging/config/efk-logging-fluentd.adoc +++ b/logging/config/efk-logging-fluentd.adoc @@ -1,13 +1,13 @@ :context: efk-logging-fluentd [id="efk-logging-fluentd"] -= Configuring Fluentd += Configuring the logging collector include::modules/common-attributes.adoc[] toc::[] -{product-title} uses Fluentd to collect operations and application logs from your cluster which {product-title} enriches with Kubernetes Pod and Namespace metadata. +{product-title} uses Fluentd or Rsyslog to collect operations and application logs from your cluster which {product-title} enriches with Kubernetes Pod and Namespace metadata. -You can configure log rotation, log location, use an external log aggregator, and make other configurations. +You can configure log rotation, log location, use an external log aggregator, change the log collector, and make other configurations for either log collector. 
[NOTE] ==== @@ -29,11 +29,9 @@ include::modules/efk-logging-fluentd-limits.adoc[leveloffset=+1] //// 4.1 modules/efk-logging-fluentd-log-rotation.adoc[leveloffset=+1] - -4.2 -modules/efk-logging-fluentd-collector.adoc[leveloffset=+1] //// +include::modules/efk-logging-fluentd-collector.adoc[leveloffset=+1] include::modules/efk-logging-fluentd-log-location.adoc[leveloffset=+1] diff --git a/logging/config/efk-logging-systemd.adoc b/logging/config/efk-logging-systemd.adoc index 493cc4d34f52..d9af320ad5f4 100644 --- a/logging/config/efk-logging-systemd.adoc +++ b/logging/config/efk-logging-systemd.adoc @@ -1,11 +1,11 @@ :context: efk-logging-systemd [id="efk-logging-systemd"] -= Configuring systemd-journald and rsyslog += Configuring systemd-journald and Rsyslog include::modules/common-attributes.adoc[] toc::[] -Because Fluentd and rsyslog read from the journal, and the journal default +Because Fluentd and Rsyslog read from the journal, and the journal default settings are very low, journal entries can be lost because the journal cannot keep up with the logging rate from system services. diff --git a/logging/efk-logging-eventrouter.adoc b/logging/efk-logging-eventrouter.adoc index e0f277d9ead1..ea8510eb7d99 100644 --- a/logging/efk-logging-eventrouter.adoc +++ b/logging/efk-logging-eventrouter.adoc @@ -9,6 +9,11 @@ The Event Router communicates with the {product-title} and prints {product-title If Cluster Logging is deployed, you can view the {product-title} events in Kibana. +[NOTE] +==== +The Event Router is not supported for the Rsyslog log collector. +==== + // The following include statements pull in the module files that comprise // the assembly. Include any combination of concept, procedure, or reference // modules required to cover the user story.
You can also include other diff --git a/logging/efk-logging.adoc b/logging/efk-logging.adoc index 5df646d21224..4a5e66b56343 100644 --- a/logging/efk-logging.adoc +++ b/logging/efk-logging.adoc @@ -28,7 +28,7 @@ include::modules/efk-logging-about-curator.adoc[leveloffset=+2] include::modules/efk-logging-about-eventrouter.adoc[leveloffset=+2] -include::modules/efk-logging-about-crd.adoc[leveloffset=+2] +include::modules/efk-logging-about-crd.adoc[leveloffset=+1] diff --git a/modules/efk-logging-about-components.adoc b/modules/efk-logging-about-components.adoc index f38524ce1b73..6101f5e45c3e 100644 --- a/modules/efk-logging-about-components.adoc +++ b/modules/efk-logging-about-components.adoc @@ -5,13 +5,13 @@ [id="efk-logging-about-components_{context}"] = About cluster logging components -There are currently 4 different types of cluster logging components: +There are currently 5 different types of cluster logging components: * logStore - This is where the logs will be stored. The current implementation is Elasticsearch. -* collection - This is the component that collects logs from the node, formats them, and stores them in the logStore. The current implementation is Fluentd. +* collection - This is the component that collects logs from the node, formats them, and stores them in the logStore, either Fluentd or Rsyslog. * visualization - This is the UI component used to view logs, graphs, charts, and so forth. The current implementation is Kibana. * curation - This is the component that trims logs by age. The current implementation is Curator. - +* event routing - This is the component that forwards events to cluster logging. The current implementation is Event Router. In this document, we may refer to logStore or Elasticsearch, visualization or Kibana, curation or Curator, collection or Fluentd, interchangeably, except where noted.
diff --git a/modules/efk-logging-about-curator.adoc b/modules/efk-logging-about-curator.adoc index 03732d23d1e3..6a57ba5b6c11 100644 --- a/modules/efk-logging-about-curator.adoc +++ b/modules/efk-logging-about-curator.adoc @@ -3,7 +3,7 @@ // * logging/efk-logging.adoc [id="efk-logging-about-curator_{context}"] -= About Curator += About logging curation The Elasticsearch Curator tool performs scheduled maintenance operations on a global and/or on a per-project basis. Curator performs actions daily based on its configuration. Only one Curator Pod is recommended per Elasticsearch cluster. diff --git a/modules/efk-logging-about-elasticsearch.adoc b/modules/efk-logging-about-elasticsearch.adoc index 141d33d74636..bf1b3378d57b 100644 --- a/modules/efk-logging-about-elasticsearch.adoc +++ b/modules/efk-logging-about-elasticsearch.adoc @@ -3,7 +3,7 @@ // * logging/efk-logging.adoc [id="efk-logging-about-elasticsearch_{context}"] -= About Elasticsearch += About the logstore {product-title} uses link:https://www.elastic.co/products/elasticsearch[Elasticsearch (ES)] to organize the log data from Fluentd into datastores, or _indices_. diff --git a/modules/efk-logging-about-eventrouter.adoc b/modules/efk-logging-about-eventrouter.adoc index fe9da1ff239e..1599c4ffa619 100644 --- a/modules/efk-logging-about-eventrouter.adoc +++ b/modules/efk-logging-about-eventrouter.adoc @@ -3,7 +3,7 @@ // * logging/efk-logging.adoc [id="efk-logging-about-eventrouter_{context}"] -= About Event Router += About event routing The Event Router is a pod that forwards {product-title} events to cluster logging. You must manually deploy Event Router. @@ -11,3 +11,8 @@ You must manually deploy Event Router. The Event Router collects events and converts them into JSON format, which takes those events and pushes them to `STDOUT`. Fluentd indexes the events to the `.operations` index. + +[NOTE] +==== +The Event Router is not supported for the Rsyslog log collector.
+==== diff --git a/modules/efk-logging-about-fluentd.adoc b/modules/efk-logging-about-fluentd.adoc index 45fb13efa1ba..9d06f8d98fd3 100644 --- a/modules/efk-logging-about-fluentd.adoc +++ b/modules/efk-logging-about-fluentd.adoc @@ -3,19 +3,17 @@ // * logging/efk-logging.adoc [id="efk-logging-about-fluentd_{context}"] -= About Fluentd += About the logging collector -{product-title} uses Fluentd to collect data about your cluster. +{product-title} can use Fluentd or Rsyslog to collect data about your cluster. -Fluentd is deployed as a DaemonSet in {product-title} that deploys pods to each {product-title} node. - -Fluentd uses `journald` as the system log source. These are log messages from -the operating system, the container runtime, and {product-title}. +The logging collector is deployed as a DaemonSet in {product-title} that deploys pods to each {product-title} node. +`journald` is the system log source supplying log messages from the operating system, the container runtime, and {product-title}. The container runtimes provide minimal information to identify the source of log messages: project, pod name, and container id. This is not sufficient to uniquely identify the source of the logs. If a pod with a given name and project is deleted before the log collector begins processing its logs, information from the API server, such as labels and annotations, -is not be available. There might not be a way to distinguish the log messages from a similarly named pod and project or trace the logs to their source. +might not be available. There might not be a way to distinguish the log messages from a similarly named pod and project or trace the logs to their source. This limitation means log collection and normalization is considered *best effort*. 
[IMPORTANT] diff --git a/modules/efk-logging-about-kibana.adoc b/modules/efk-logging-about-kibana.adoc index c4fca027e296..1f5b104dc0fc 100644 --- a/modules/efk-logging-about-kibana.adoc +++ b/modules/efk-logging-about-kibana.adoc @@ -3,7 +3,7 @@ // * logging/efk-logging.adoc [id="efk-logging-about-kibana_{context}"] -= About Kibana += About logging visualization {product-title} uses Kibana to display the log data collected by Fluentd and indexed by Elasticsearch. diff --git a/modules/efk-logging-about.adoc b/modules/efk-logging-about.adoc index 6d63cc73c018..2ab170b6734d 100644 --- a/modules/efk-logging-about.adoc +++ b/modules/efk-logging-about.adoc @@ -15,7 +15,7 @@ link:https://www.elastic.co/guide/en/kibana/current/introduction.html[Kibana] is where users and administrators can create rich visualizations and dashboards with the aggregated data. {product-title} cluster administrators can deploy cluster logging by creating a subscription from the console -in the 'openshift-logging' project. Creating the subscription deploys the Cluster Logging Operator, the Elasticsearch Operator, and the +in the `openshift-logging` project. Creating the subscription deploys the Cluster Logging Operator, the Elasticsearch Operator, and the other resources necessary to support the deployment of cluster logging. The operators are responsible for deploying, upgrading, and maintaining cluster logging. 
diff --git a/modules/efk-logging-configuring-image-about.adoc b/modules/efk-logging-configuring-image-about.adoc index 1322a778ce5b..53df3c749c12 100644 --- a/modules/efk-logging-configuring-image-about.adoc +++ b/modules/efk-logging-configuring-image-about.adoc @@ -14,34 +14,23 @@ You can view the images by running the following command: ---- oc -n openshift-logging set env deployment/cluster-logging-operator --list | grep _IMAGE -ELASTICSEARCH_IMAGE=registry.redhat.io/openshift4/ose-logging-elasticsearch5:v4.1 <1> -FLUENTD_IMAGE=registry.redhat.io/openshift4/ose-logging-fluentd:v4.1 <2> -KIBANA_IMAGE=registry.redhat.io/openshift4/ose-logging-kibana5:v4.1 <3> -CURATOR_IMAGE=registry.redhat.io/openshift4/ose-logging-curator5:v4.1 <4> -OAUTH_PROXY_IMAGE=registry.redhat.io/openshift4/ose-oauth-proxy:v4.1 <5> +ELASTICSEARCH_IMAGE=registry.redhat.io/openshift4/ose-logging-elasticsearch5:v4.2 <1> +FLUENTD_IMAGE=registry.redhat.io/openshift4/ose-logging-fluentd:v4.2 <2> +KIBANA_IMAGE=registry.redhat.io/openshift4/ose-logging-kibana5:v4.2 <3> +CURATOR_IMAGE=registry.redhat.io/openshift4/ose-logging-curator5:v4.2 <4> +OAUTH_PROXY_IMAGE=registry.redhat.io/openshift4/ose-oauth-proxy:v4.2 <5> +RSYSLOG_IMAGE=registry.redhat.io/openshift4/ose-logging-rsyslog:v4.2 <6> ---- <1> *ELASTICSEARCH_IMAGE* deploys Elasticsearch. <2> *FLUENTD_IMAGE* deploys Fluentd. <3> *KIBANA_IMAGE* deploys Kibana. <4> *CURATOR_IMAGE* deploys Curator. <5> *OAUTH_PROXY_IMAGE* defines OAUTH for OpenShift Container Platform. - -[NOTE] -==== -The values might be different depending on your environment. -==== - - - -//// -Comment out until 4.1 -* *RSYSLOG_IMAGE* deploys Rsyslog, by default `docker.io/viaq/rsyslog:latest`. <1> - -<1> The image used for RSYSLOG when deployed. You can change this value using an environment variable. You cannot change this value through the Cluster Logging CR. +<6> *RSYSLOG_IMAGE* deploys Rsyslog. [NOTE] ==== The Rsyslog log collector is in Technology Preview. 
==== -//// +The values might be different depending on your environment. diff --git a/modules/efk-logging-deploying-about.adoc b/modules/efk-logging-deploying-about.adoc index 2e60888ecc25..f38ed7e5d00d 100644 --- a/modules/efk-logging-deploying-about.adoc +++ b/modules/efk-logging-deploying-about.adoc @@ -136,7 +136,6 @@ You can set the policy that defines how Elasticsearch shards are replicated acro * `SingleRedundancy`. A single copy of each shard. Logs are always available and recoverable as long as at least two data nodes exist. * `ZeroRedundancy`. No copies of any shards. Logs may be unavailable (or lost) in the event a node is down or fails. -//// Log collectors:: You can select which log collector is deployed as a Daemonset to each node in the {product-title} cluster, either: @@ -157,7 +156,6 @@ You can select which log collector is deployed as a Daemonset to each node in th memory: type: "fluentd" ---- -//// Curator schedule:: You specify the schedule for Curator in the [cron format](https://en.wikipedia.org/wiki/Cron). diff --git a/modules/efk-logging-eventrouter-deploy.adoc b/modules/efk-logging-eventrouter-deploy.adoc index 0c69046945be..e95d8ead0d1f 100644 --- a/modules/efk-logging-eventrouter-deploy.adoc +++ b/modules/efk-logging-eventrouter-deploy.adoc @@ -9,6 +9,11 @@ Use the following steps to deploy Event Router into your cluster. The following Template object creates the Service Account, ClusterRole, and ClusterRoleBinding required for the Event Router. +[NOTE] +==== +The Event Router is not supported for the Rsyslog log collector. +==== + .Prerequisites You need proper permissions to create service accounts and update cluster role bindings. For example, you can run the following template with a user that has the *cluster-admin* role.
diff --git a/modules/efk-logging-external-elasticsearch.adoc b/modules/efk-logging-external-elasticsearch.adoc index eda34fcbf122..9519a36fa9f1 100644 --- a/modules/efk-logging-external-elasticsearch.adoc +++ b/modules/efk-logging-external-elasticsearch.adoc @@ -3,12 +3,12 @@ // * logging/efk-logging-external.adoc [id="efk-logging-external-elasticsearch_{context}"] -= Configuring Fluentd to send logs to an external Elasticsearch instance += Configuring the log collector to send logs to an external Elasticsearch instance -Fluentd sends logs to the value of the `ES_HOST`, `ES_PORT`, `OPS_HOST`, +The log collector sends logs to the value of the `ES_HOST`, `ES_PORT`, `OPS_HOST`, and `OPS_PORT` environment variables of the Elasticsearch deployment configuration. The application logs are directed to the `ES_HOST` destination, -and operations logs to `OPS_HOST`. +and operations logs to `OPS_HOST`. [NOTE] ==== @@ -28,7 +28,7 @@ an instance of Fluentd that you control and that is configured with the To direct logs to a specific Elasticsearch instance: -. Edit the `fluentd` DaemonSet in the *openshift-logging* project: +. Edit the `fluentd` or `rsyslog` DaemonSet in the *openshift-logging* project: + [source,yaml] ---- diff --git a/modules/efk-logging-external-syslog.adoc b/modules/efk-logging-external-syslog.adoc index dc7a1221a3d7..79991798a1bd 100644 --- a/modules/efk-logging-external-syslog.adoc +++ b/modules/efk-logging-external-syslog.adoc @@ -3,10 +3,15 @@ // * logging/efk-logging-external.adoc [id="efk-logging-external-syslog_{context}"] -= Configuring Fluentd to send logs to an external syslog server += Configuring log collector to send logs to an external syslog server Use the `fluent-plugin-remote-syslog` plug-in on the host to send logs to an -external syslog server. +external syslog server. 
+ +[NOTE] +==== +For Rsyslog, you can edit the Rsyslog ConfigMap to add support for Syslog log forwarding using the *omfwd* module, see link:https://www.rsyslog.com/doc/v8-stable/configuration/modules/omfwd.html[omfwd: syslog Forwarding Output Module]. To send logs to a different Rsyslog instance, you can use the *omrelp* module, see link:https://www.rsyslog.com/doc/v8-stable/configuration/modules/omrelp.html[omrelp: RELP Output Module]. +==== .Prerequisite diff --git a/modules/efk-logging-fluentd-alerts.adoc b/modules/efk-logging-fluentd-alerts.adoc index fdc15bc744f8..c048ec1976db 100644 --- a/modules/efk-logging-fluentd-alerts.adoc +++ b/modules/efk-logging-fluentd-alerts.adoc @@ -3,15 +3,32 @@ // * logging/efk-logging-fluentd.adoc [id="efk-logging-fluentd-log-viewing_{context}"] -= Viewing Fluentd logs += Viewing collected logs -How you view logs depends upon the `LOGGING_FILE_PATH` setting. +How you view logs generated by the log collector, Fluentd or Rsyslog, depends upon the `LOGGING_FILE_PATH` setting. + +* If you are using `LOGGING_FILE_PATH=console`, Fluentd writes logs to `stdout/stderr`. +You can retrieve the logs with the `oc logs [-f] ` command, where the `-f` +is optional, from the project where the pod is located. ++ +---- +$ oc logs -f <1> +---- +<1> Specify the name of a Fluentd pod. Use the `-f` option to follow what is being written into the logs. ++ +For example ++ +---- +$ oc logs -f fluentd-ht42r -n openshift-logging +---- ++ +The contents of log files are printed out, starting with the oldest log. * If `LOGGING_FILE_PATH` points to a file, the default, use the *logs* utility, from the project, where the pod is located, to print out the contents of Fluentd log files: + ---- -$ oc exec -- logs <1> +$ oc exec -- logs <1> ---- <1> Specify the name of a Fluentd pod. Note the space before `logs`.
+ @@ -24,22 +41,15 @@ $ oc exec fluentd-ht42r -n openshift-logging -- logs To view the current setting: + ---- -oc -n openshift-logging set env daemonset/fluentd --list | grep LOGGING_FILE_PATH ----- +$ oc -n openshift-logging set env daemonset/fluentd --list | grep LOGGING_FILE_PATH -* If you are using `LOGGING_FILE_PATH=console`, Fluentd writes logs to stdout/stderr`. -You can retrieve the logs with the `oc logs [-f] ` command, where the `-f` -is optional, from the project where the pod is located. -+ ----- -$ oc logs -f <1> +LOGGING_FILE_PATH=/etc/fluentd/fluentd.log ---- -<1> Specify the name of a Fluentd pod. Use the `-f` option to follow what is being written into the logs. -+ -For example + ---- -$ oc logs -f fluentd-ht42r -n openshift-logging +$ oc -n openshift-logging set env daemonset/rsyslog --list | grep LOGGING_FILE_PATH + +LOGGING_FILE_PATH=/etc/rsyslog/rsyslog.log ---- -+ -The contents of log files are printed out, starting with the oldest log. + + diff --git a/modules/efk-logging-fluentd-collector.adoc b/modules/efk-logging-fluentd-collector.adoc index a69b6c50dbf1..0064bbb48143 100644 --- a/modules/efk-logging-fluentd-collector.adoc +++ b/modules/efk-logging-fluentd-collector.adoc @@ -51,7 +51,7 @@ nodeSpec: collection: logs: - type: "fluentd" <1> + type: "rsyslog" <1> ---- -<1> Set the log collector to `fluentd`, the default, or `rsyslog`. +<1> Set the log collector to `rsyslog` or `fluentd`. 
diff --git a/modules/efk-logging-fluentd-envvar.adoc b/modules/efk-logging-fluentd-envvar.adoc index 6560a64ce44c..f3d4bc3789d0 100644 --- a/modules/efk-logging-fluentd-envvar.adoc +++ b/modules/efk-logging-fluentd-envvar.adoc @@ -3,9 +3,14 @@ // * logging/efk-logging-fluentd.adoc [id="efk-logging-fluentd-envvar_{context}"] -= Configuring Fluentd using environment variables += Configuring the logging collector using environment variables -You can use link:https://github.com/openshift/origin-aggregated-logging/blob/master/fluentd/README.md[environment variables] to modify your Fluentd configuration. +You can use environment variables to modify the +configuration of the log collector, Fluentd or Rsyslog. + +See the link:https://github.com/openshift/origin-aggregated-logging/blob/master/fluentd/README.md[Fluentd README] in Github or the +link:https://github.com/openshift/origin-aggregated-logging/blob/master/rsyslog/README.md[Rsyslog README] for lists of the +available environment variables. .Prerequisite diff --git a/modules/efk-logging-fluentd-external.adoc b/modules/efk-logging-fluentd-external.adoc index 04e9a5f48cda..6dc9c6127367 100644 --- a/modules/efk-logging-fluentd-external.adoc +++ b/modules/efk-logging-fluentd-external.adoc @@ -13,6 +13,14 @@ hosted Fluentd has processed them. ifdef::openshift-origin[] The `secure-forward` plug-in is provided with the Fluentd image as of v1.4.0. endif::openshift-origin[] +ifdef::openshift-enterprise[] +The `secure-forward` plug-in is supported by Fluentd only. +endif::openshift-enterprise[] + +[NOTE] +==== +For Rsyslog, you can edit the Rsyslog configmap to add support for Syslog log forwarding using the *omfwd* module, see link:https://www.rsyslog.com/doc/v8-stable/configuration/modules/omfwd.html[omfwd: syslog Forwarding Output Module].
To send logs to a different Rsyslog instance, you can use the *omrelp* module, see link:https://www.rsyslog.com/doc/v8-stable/configuration/modules/omrelp.html[omrelp: RELP Output Module]. +==== The logging deployment provides a `secure-forward.conf` section in the Fluentd configmap for configuring the external aggregator: diff --git a/modules/efk-logging-fluentd-json.adoc b/modules/efk-logging-fluentd-json.adoc index 8244da356d62..d1a3226615cc 100644 --- a/modules/efk-logging-fluentd-json.adoc +++ b/modules/efk-logging-fluentd-json.adoc @@ -3,12 +3,12 @@ // * logging/efk-logging-fluentd.adoc [id="efk-logging-fluentd-json_{context}"] -= Configuring Fluentd JSON parsing += Configuring log collection JSON parsing -You can configure Fluentd to inspect each log message to determine if the message is in *JSON* format and merge +You can configure the log collector, Fluentd or Rsyslog, to determine if a log message is in *JSON* format and merge the message into the JSON payload document posted to Elasticsearch. This feature is disabled by default. -You can enable or disable this feature by editing the `MERGE_JSON_LOG` environment variable in the *fluentd* daemonset. +You can enable or disable this feature by editing the `MERGE_JSON_LOG` environment variable in the *fluentd* or *rsyslog* daemonset. [IMPORTANT] ==== @@ -18,7 +18,7 @@ Enabling this feature comes with risks, including: * Potential buffer storage leak caused by rejected message cycling. * Overwrite of data for field with same names. -The features in this topic should be used by only experienced Fluentd and Elasticsearch users. +The features in this topic should be used by only experienced Fluentd, Rsyslog, and Elasticsearch users. ==== .Prerequisites @@ -33,6 +33,10 @@ Use the following command to enable this feature: oc set env ds/fluentd MERGE_JSON_LOG=true <1> ---- +---- +oc set env ds/rsyslog MERGE_JSON_LOG=true <1> +---- + <1> Set this to `false` to disable this feature or `true` to enable this feature.
*Setting MERGE_JSON_LOG and CDM_UNDEFINED_TO_STRING* @@ -41,8 +45,8 @@ If you set the `MERGE_JSON_LOG` and `CDM_UNDEFINED_TO_STRING` enviroment variabl When Fluentd rolls over the indices for the next day's logs, it will create a brand new index. The field definitions are updated and you will not get the *400* error. -Records that have *hard* errors, such as schema violations, corrupted data, and so forth, cannot be retried. Fluent sends the records for error handling. If you link:https://docs.fluentd.org/v1.0/articles/config-file#@error-label[add a -`