From 6da83e8c5981a4f83d102f38d7025378dea77c00 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?No=C3=A9mi=20V=C3=A1nyi?=
Date: Fri, 31 Aug 2018 10:02:06 +0200
Subject: [PATCH] Add tag "multiline" to "log.flags" if event consists of multiple lines. (#7997)

Add the "multiline" tag to "log.flags" if the event contains multiple
lines. This way users can filter for multiline messages by looking for
the "multiline" value in "log.flags".

Example event:

{
  "@timestamp": "2018-08-17T11:35:21.813Z",
  "@metadata": {
    "beat": "filebeat",
    "type": "doc",
    "version": "7.0.0-alpha1"
  },
  "source": "/home/n/test.log",
  "offset": 0,
  "log": {
    "flags": [
      "multiline"
    ]
  },
  "message": "[test line\ntest line]",
  "prospector": {
    "type": "log"
  },
  "input": {
    "type": "log"
  },
  "beat": {
    "hostname": "sleipnir",
    "version": "7.0.0-alpha1",
    "name": "sleipnir"
  },
  "host": {
    "name": "sleipnir"
  }
}

Closes #957
---
 CHANGELOG.asciidoc                             |  1 +
 .../server/test/test.log-expected.json         | 15 ++++++++
 .../slowlog/test/test.log-expected.json        |  3 ++
 .../icinga/main/test/test.log-expected.json    |  3 ++
 ...-9.6-debian-with-slowlog.log-expected.json  | 15 ++++++++
 .../darwin-syslog-sample.log-expected.json     |  3 ++
 filebeat/reader/multiline/multiline.go         |  4 +++
 filebeat/reader/multiline/multiline_test.go    |  5 +++
 filebeat/tests/system/test_json.py             | 34 -------------------
 9 files changed, 49 insertions(+), 34 deletions(-)

diff --git a/CHANGELOG.asciidoc b/CHANGELOG.asciidoc
index 6b2827debab3..990d09ff645f 100644
--- a/CHANGELOG.asciidoc
+++ b/CHANGELOG.asciidoc
@@ -105,6 +105,7 @@ https://github.com/elastic/beats/compare/v6.4.0...master[Check the HEAD diff]
 - Keep raw user agent information after parsing as user_agent_raw in Filebeat modules. {pull}7823[7832]
 - Make docker input check if container strings are empty {pull}7960[7960]
 - Add tag "truncated" to "log.flags" if incoming line is longer than configured limit. {pull}7991[7991]
+- Add tag "multiline" to "log.flags" if event consists of multiple lines.
{pull}7997[7997] *Heartbeat* diff --git a/filebeat/module/elasticsearch/server/test/test.log-expected.json b/filebeat/module/elasticsearch/server/test/test.log-expected.json index d58429547ca5..f53a28cf9fb4 100644 --- a/filebeat/module/elasticsearch/server/test/test.log-expected.json +++ b/filebeat/module/elasticsearch/server/test/test.log-expected.json @@ -167,6 +167,9 @@ "fileset.module": "elasticsearch", "fileset.name": "server", "input.type": "log", + "log.flags": [ + "multiline" + ], "log.level": "WARN", "message": "master left (reason = failed to ping, tried [3] times, each with maximum [30s] timeout), current nodes: nodes:\n {srvmulpvlsk252_md}{uc5xdiQgRhaBIY-sszgjvQ}{X9pC0t1UQQix_NNOM0J6JQ}{srvmulpvlsk252.loganalytics.santanderuk.corp}{180.39.9.93:9300}{ml.max_open_jobs=10, ml.enabled=true}, local\n {srvmulpvlsk258_md}{HgW6EDn5QCmWVmICy4saHw}{o8zku7OJR4CTp0IjY8Ag4Q}{srvmulpvlsk258.loganalytics.santanderuk.corp}{180.39.9.99:9300}{ml.max_open_jobs=10, ml.enabled=true}\n {srvmulpvlsk250_md}{igrwSoPGSJ6u_5b8k26tgQ}{PuRqciBFRbiQvL2_lS7LrQ}{srvmulpvlsk250.loganalytics.santanderuk.corp}{180.39.9.91:9300}{ml.max_open_jobs=10, ml.enabled=true}, master\n {srvmulpvlsk254_id}{wZYeAh2URc2NwBIHZolLWQ}{3nduupo-TzSPaXjQaNu4Sg}{srvmulpvlsk254.loganalytics.santanderuk.corp}{180.39.9.95:9300}{ml.max_open_jobs=10, ml.enabled=true}", "offset": 2008, @@ -179,6 +182,9 @@ "fileset.module": "elasticsearch", "fileset.name": "server", "input.type": "log", + "log.flags": [ + "multiline" + ], "log.level": "WARN", "message": "path: /_xpack/monitoring/_bulk, params: {system_id=logstash, system_api_version=2, interval=1s}\norg.elasticsearch.cluster.block.ClusterBlockException: blocked by: [SERVICE_UNAVAILABLE/2/no master];\n at org.elasticsearch.cluster.block.ClusterBlocks.globalBlockedException(ClusterBlocks.java:165) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.cluster.block.ClusterBlocks.globalBlockedRaiseException(ClusterBlocks.java:151) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.monitoring.action.TransportMonitoringBulkAction.doExecute(TransportMonitoringBulkAction.java:57) ~[?:?]\n at org.elasticsearch.xpack.monitoring.action.TransportMonitoringBulkAction.doExecute(TransportMonitoringBulkAction.java:40) ~[?:?]\n at org.elasticsearch.action.support.TransportAction.doExecute(TransportAction.java:146) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.TransportAction$RequestFilterChain.proceed(TransportAction.java:170) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.lambda$apply$1(SecurityActionFilter.java:133) ~[?:?]\n at org.elasticsearch.action.ActionListener$1.onResponse(ActionListener.java:59) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.lambda$authorizeRequest$4(SecurityActionFilter.java:208) ~[?:?]\n at org.elasticsearch.xpack.security.authz.AuthorizationUtils$AsyncAuthorizer.maybeRun(AuthorizationUtils.java:127) ~[?:?]\n at org.elasticsearch.xpack.security.authz.AuthorizationUtils$AsyncAuthorizer.setRunAsRoles(AuthorizationUtils.java:121) ~[?:?]\n at org.elasticsearch.xpack.security.authz.AuthorizationUtils$AsyncAuthorizer.authorize(AuthorizationUtils.java:109) ~[?:?]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.authorizeRequest(SecurityActionFilter.java:210) ~[?:?]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.lambda$applyInternal$3(SecurityActionFilter.java:186) ~[?:?]\n at 
org.elasticsearch.action.ActionListener$1.onResponse(ActionListener.java:59) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.security.authc.AuthenticationService$Authenticator.lambda$authenticateAsync$2(AuthenticationService.java:212) ~[?:?]\n at org.elasticsearch.xpack.security.authc.AuthenticationService$Authenticator.lambda$lookForExistingAuthentication$4(AuthenticationService.java:246) ~[?:?]\n at org.elasticsearch.xpack.security.authc.AuthenticationService$Authenticator.lookForExistingAuthentication(AuthenticationService.java:257) ~[?:?]\n at org.elasticsearch.xpack.security.authc.AuthenticationService$Authenticator.authenticateAsync(AuthenticationService.java:210) ~[?:?]\n at org.elasticsearch.xpack.security.authc.AuthenticationService$Authenticator.access$000(AuthenticationService.java:159) ~[?:?]\n at org.elasticsearch.xpack.security.authc.AuthenticationService.authenticate(AuthenticationService.java:122) ~[?:?]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.applyInternal(SecurityActionFilter.java:185) ~[?:?]\n at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.apply(SecurityActionFilter.java:145) ~[?:?]\n at org.elasticsearch.action.support.TransportAction$RequestFilterChain.proceed(TransportAction.java:168) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.TransportAction.execute(TransportAction.java:142) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.TransportAction.execute(TransportAction.java:84) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.node.NodeClient.executeLocally(NodeClient.java:83) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.node.NodeClient.doExecute(NodeClient.java:72) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.support.AbstractClient.execute(AbstractClient.java:408) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:80) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.monitoring.rest.action.RestMonitoringBulkAction.lambda$doPrepareRequest$0(RestMonitoringBulkAction.java:77) ~[?:?]\n at org.elasticsearch.rest.BaseRestHandler.handleReques", "offset": 2907, @@ -191,6 +197,9 @@ "fileset.module": "elasticsearch", "fileset.name": "server", "input.type": "log", + "log.flags": [ + "multiline" + ], "log.level": "WARN", "message": "path: /_xpack/license, params: {}\norg.elasticsearch.discovery.MasterNotDiscoveredException: NodeDisconnectedException[[srvmulpvlsk250_md][180.39.9.91:9300][cluster:monitor/xpack/license/get] disconnected]\n at org.elasticsearch.action.support.master.TransportMasterNodeAction$AsyncSingleAction$4.onTimeout(TransportMasterNodeAction.java:209) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.cluster.ClusterStateObserver$ContextPreservingListener.onTimeout(ClusterStateObserver.java:311) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.cluster.ClusterStateObserver.waitForNextChange(ClusterStateObserver.java:139) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.cluster.ClusterStateObserver.waitForNextChange(ClusterStateObserver.java:111) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.master.TransportMasterNodeAction$AsyncSingleAction.retry(TransportMasterNodeAction.java:194) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.master.TransportMasterNodeAction$AsyncSingleAction.access$500(TransportMasterNodeAction.java:107) 
~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.action.support.master.TransportMasterNodeAction$AsyncSingleAction$3.handleException(TransportMasterNodeAction.java:183) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.transport.TransportService$ContextRestoreResponseHandler.handleException(TransportService.java:1067) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.transport.TransportService$ContextRestoreResponseHandler.handleException(TransportService.java:1067) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.transport.TransportService$Adapter.lambda$onConnectionClosed$6(TransportService.java:893) ~[elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingRunnable.run(ThreadContext.java:569) [elasticsearch-5.6.3.jar:5.6.3]\n at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_161]\n at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_161]\n at java.lang.Thread.run(Thread.java:748) [?:1.8.0_161]\nCaused by: org.elasticsearch.transport.NodeDisconnectedException: [srvmulpvlsk250_md][180.39.9.91:9300][cluster:monitor/xpack/license/get] disconnected", "offset": 7412, @@ -206,6 +215,9 @@ "fileset.module": "elasticsearch", "fileset.name": "server", "input.type": "log", + "log.flags": [ + "multiline" + ], "log.level": "WARN", "message": "duration [3.8s], collections [1]/[4.3s], total [3.8s]/[8.8h], memory [16.5gb]->[15.7gb]/[30.8gb], all_po\nols {[young] [1.2gb]->[24mb]/[1.4gb]}{[survivor] [191.3mb]->[191.3mb]/[191.3mb]}{[old] [15.1gb]->[15.5gb]/[29.1gb]}", "offset": 9873, @@ -246,6 +258,9 @@ "fileset.module": "elasticsearch", "fileset.name": "server", "input.type": "log", + "log.flags": [ + "multiline" + ], "log.level": "WARN", "message": "monitoring execution failed\norg.elasticsearch.xpack.monitoring.exporter.ExportException: Exception when closing export bulk\n at org.elasticsearch.xpack.monitoring.exporter.ExportBulk$1$1.(ExportBulk.java:106) ~[?:?]\n at org.elasticsearch.xpack.monitoring.exporter.ExportBulk$1.onFailure(ExportBulk.java:104) ~[?:?]\n at org.elasticsearch.xpack.monitoring.exporter.ExportBulk$Compound$1.onResponse(ExportBulk.java:217) ~[?:?]\n at org.elasticsearch.xpack.monitoring.exporter.ExportBulk$Compound$1.onResponse(ExportBulk.java:211) ~[?:?]\n at org.elasticsearch.xpack.common.IteratingActionListener.onResponse(IteratingActionListener.java:108) ~[?:?]\n at org.elasticsearch.action.ActionListener$1.onResponse(ActionListener.java:59) [elasticsearch-5.6.3.jar:5.6.3]\n at org.elasticsearch.xpack.monitoring.exporter.http.HttpExportBulk$1.onSuccess(HttpExportBulk.java:115) [x-pack-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.RestClient$FailureTrackingResponseListener.onSuccess(RestClient.java:597) [elasticsearch-rest-client-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.RestClient$1.completed(RestClient.java:352) [elasticsearch-rest-client-5.6.3.jar:5.6.3]\n at org.elasticsearch.client.RestClient$1.completed(RestClient.java:343) [elasticsearch-rest-client-5.6.3.jar:5.6.3]\n at org.apache.http.concurrent.BasicFuture.completed(BasicFuture.java:119) [httpcore-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.client.DefaultClientExchangeHandlerImpl.responseCompleted(DefaultClientExchangeHandlerImpl.java:177) [httpasyncclient-4.1.2.jar:4.1.2]\n at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.processResponse(HttpAsyncRequestExecutor.java:436) [httpcore-nio-4.4.5.jar:4.4.5]\n at 
org.apache.http.nio.protocol.HttpAsyncRequestExecutor.inputReady(HttpAsyncRequestExecutor.java:326) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.DefaultNHttpClientConnection.consumeInput(DefaultNHttpClientConnection.java:265) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.client.InternalIODispatch.onInputReady(InternalIODispatch.java:81) [httpasyncclient-4.1.2.jar:4.1.2]\n at org.apache.http.impl.nio.client.InternalIODispatch.onInputReady(InternalIODispatch.java:39) [httpasyncclient-4.1.2.jar:4.1.2]\n at org.apache.http.impl.nio.reactor.AbstractIODispatch.inputReady(AbstractIODispatch.java:114) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.BaseIOReactor.readable(BaseIOReactor.java:162) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.AbstractIOReactor.processEvent(AbstractIOReactor.java:337) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.AbstractIOReactor.processEvents(AbstractIOReactor.java:315) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.AbstractIOReactor.execute(AbstractIOReactor.java:276) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.BaseIOReactor.execute(BaseIOReactor.java:104) [httpcore-nio-4.4.5.jar:4.4.5]\n at org.apache.http.impl.nio.reactor.AbstractMultiworkerIOReactor$Worker.run(AbstractMultiworkerIOReactor.java:588) [httpcore-nio-4.4.5.jar:4.4.5]\n at java.lang.Thread.run(Thread.java:748) [?:1.8.0_161]\n", "offset": 10648, diff --git a/filebeat/module/elasticsearch/slowlog/test/test.log-expected.json b/filebeat/module/elasticsearch/slowlog/test/test.log-expected.json index b1c1828b6cad..c2a6ba286342 100644 --- a/filebeat/module/elasticsearch/slowlog/test/test.log-expected.json +++ b/filebeat/module/elasticsearch/slowlog/test/test.log-expected.json @@ -127,6 +127,9 @@ "fileset.module": "elasticsearch", "fileset.name": "slowlog", "input.type": "log", + "log.flags": [ + "multiline" + ], "log.level": "INFO", "message": "[2018-07-04T21:51:30,411][INFO ][index.indexing.slowlog.index] [v_VJhjV] [metricbeat-6.3.0-2018.07.04/VLKxBLvUSYuIMKzpacGjRg] took[1.7ms], took_millis[1], type[doc], id[s01HZ2QBk9jw4gtgaFtn], routing[], source[\n{\n \"@timestamp\":\"2018-07-04T21:27:30.730Z\",\n \"metricset\":{\n \"name\":\"network\",\n \"module\":\"system\",\n \"rtt\":7264},\n \"system\":{\n \"network\":{\n \"name\":\"lo0\",\n \"in\":{\n \"errors\":0,\n \"dropped\":0,\n \"bytes\":77666873,\n \"packets\":244595},\n \"out\":{\n \"packets\":244595,\n \"bytes\":77666873,\n \"errors\":0,\n \"dropped\":0\n }\n }\n },\n \"beat\":{\n \"name\":\"Rados-MacBook-Pro.local\",\n \"hostname\":\"Rados-MacBook-Pro.local\",\n \"version\":\"6.3.0\"\n },\n \"host\":{\n \"name\":\"Rados-MacBook-Pro.local\"\n }\n }]", "offset": 4753, diff --git a/filebeat/module/icinga/main/test/test.log-expected.json b/filebeat/module/icinga/main/test/test.log-expected.json index 57174332ba33..59d4822ce5d8 100644 --- a/filebeat/module/icinga/main/test/test.log-expected.json +++ b/filebeat/module/icinga/main/test/test.log-expected.json @@ -18,6 +18,9 @@ "icinga.main.message": "Notification command for object 'demo!load' (PID: 19401, arguments: '/etc/icinga2/scripts/mail-service-notification.sh') terminated with exit code 127, output: /etc/icinga2/scripts/mail-service-notification.sh: 20: /etc/icinga2/scripts/mail-service-notification.sh: mail: not found\n/usr/bin/printf: write error: Broken pipe\n", "icinga.main.severity": "warning", "input.type": "log", + "log.flags": [ + "multiline" + ], 
"offset": 133, "prospector.type": "log" }, diff --git a/filebeat/module/postgresql/log/test/postgresql-9.6-debian-with-slowlog.log-expected.json b/filebeat/module/postgresql/log/test/postgresql-9.6-debian-with-slowlog.log-expected.json index 9ee8cba16172..b17481ca76b3 100644 --- a/filebeat/module/postgresql/log/test/postgresql-9.6-debian-with-slowlog.log-expected.json +++ b/filebeat/module/postgresql/log/test/postgresql-9.6-debian-with-slowlog.log-expected.json @@ -76,6 +76,9 @@ "fileset.module": "postgresql", "fileset.name": "log", "input.type": "log", + "log.flags": [ + "multiline" + ], "message": "2017-07-31 13:36:43.557 CEST [4983] postgres@postgres LOG: duration: 37.118 ms statement: SELECT d.datname as \"Name\",\n\t pg_catalog.pg_get_userbyid(d.datdba) as \"Owner\",\n\t pg_catalog.pg_encoding_to_char(d.encoding) as \"Encoding\",\n\t d.datcollate as \"Collate\",\n\t d.datctype as \"Ctype\",\n\t pg_catalog.array_to_string(d.datacl, E'\\n') AS \"Access privileges\"\n\tFROM pg_catalog.pg_database d\n\tORDER BY 1;", "offset": 445, "postgresql.log.database": "postgres", @@ -93,6 +96,9 @@ "fileset.module": "postgresql", "fileset.name": "log", "input.type": "log", + "log.flags": [ + "multiline" + ], "message": "2017-07-31 13:36:44.104 CEST [4986] postgres@postgres LOG: duration: 2.895 ms statement: SELECT d.datname as \"Name\",\n\t pg_catalog.pg_get_userbyid(d.datdba) as \"Owner\",\n\t pg_catalog.pg_encoding_to_char(d.encoding) as \"Encoding\",\n\t d.datcollate as \"Collate\",\n\t d.datctype as \"Ctype\",\n\t pg_catalog.array_to_string(d.datacl, E'\\n') AS \"Access privileges\"\n\tFROM pg_catalog.pg_database d\n\tORDER BY 1;", "offset": 873, "postgresql.log.database": "postgres", @@ -110,6 +116,9 @@ "fileset.module": "postgresql", "fileset.name": "log", "input.type": "log", + "log.flags": [ + "multiline" + ], "message": "2017-07-31 13:36:44.642 CEST [4989] postgres@postgres LOG: duration: 2.809 ms statement: SELECT d.datname as \"Name\",\n\t pg_catalog.pg_get_userbyid(d.datdba) as \"Owner\",\n\t pg_catalog.pg_encoding_to_char(d.encoding) as \"Encoding\",\n\t d.datcollate as \"Collate\",\n\t d.datctype as \"Ctype\",\n\t pg_catalog.array_to_string(d.datacl, E'\\n') AS \"Access privileges\"\n\tFROM pg_catalog.pg_database d\n\tORDER BY 1;", "offset": 1300, "postgresql.log.database": "postgres", @@ -159,6 +168,9 @@ "fileset.module": "postgresql", "fileset.name": "log", "input.type": "log", + "log.flags": [ + "multiline" + ], "message": "2017-07-31 13:39:21.025 CEST [5404] postgres@postgres LOG: duration: 37.598 ms statement: SELECT n.nspname as \"Schema\",\n\t c.relname as \"Name\",\n\t CASE c.relkind WHEN 'r' THEN 'table' WHEN 'v' THEN 'view' WHEN 'm' THEN 'materialized view' WHEN 'i' THEN 'index' WHEN 'S' THEN 'sequence' WHEN 's' THEN 'special' WHEN 'f' THEN 'foreign table' END as \"Type\",\n\t pg_catalog.pg_get_userbyid(c.relowner) as \"Owner\"\n\tFROM pg_catalog.pg_class c\n\t LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace\n\tWHERE c.relkind IN ('r','')\n\t AND n.nspname <> 'pg_catalog'\n\t AND n.nspname <> 'information_schema'\n\t AND n.nspname !~ '^pg_toast'\n\t AND pg_catalog.pg_table_is_visible(c.oid)\n\tORDER BY 1,2;", "offset": 1907, "postgresql.log.database": "postgres", @@ -210,6 +222,9 @@ "fileset.module": "postgresql", "fileset.name": "log", "input.type": "log", + "log.flags": [ + "multiline" + ], "message": "2017-07-31 13:40:54.310 CEST [5502] postgres@clients LOG: duration: 26.082 ms statement: SELECT n.nspname as \"Schema\",\n\t c.relname as \"Name\",\n\t CASE 
c.relkind WHEN 'r' THEN 'table' WHEN 'v' THEN 'view' WHEN 'm' THEN 'materialized view' WHEN 'i' THEN 'index' WHEN 'S' THEN 'sequence' WHEN 's' THEN 'special' WHEN 'f' THEN 'foreign table' END as \"Type\",\n\t pg_catalog.pg_get_userbyid(c.relowner) as \"Owner\"\n\tFROM pg_catalog.pg_class c\n\t LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace\n\tWHERE c.relkind IN ('r','')\n\t AND n.nspname <> 'pg_catalog'\n\t AND n.nspname <> 'information_schema'\n\t AND n.nspname !~ '^pg_toast'\n\t AND pg_catalog.pg_table_is_visible(c.oid)\n\tORDER BY 1,2;", "offset": 2847, "postgresql.log.database": "clients", diff --git a/filebeat/module/system/syslog/test/darwin-syslog-sample.log-expected.json b/filebeat/module/system/syslog/test/darwin-syslog-sample.log-expected.json index 8bf8a368e5cb..4d667d28a17d 100644 --- a/filebeat/module/system/syslog/test/darwin-syslog-sample.log-expected.json +++ b/filebeat/module/system/syslog/test/darwin-syslog-sample.log-expected.json @@ -4,6 +4,9 @@ "fileset.module": "system", "fileset.name": "syslog", "input.type": "log", + "log.flags": [ + "multiline" + ], "offset": 0, "prospector.type": "log", "system.syslog.hostname": "a-mac-with-esc-key", diff --git a/filebeat/reader/multiline/multiline.go b/filebeat/reader/multiline/multiline.go index 2e974c19b235..ae31c399ecb9 100644 --- a/filebeat/reader/multiline/multiline.go +++ b/filebeat/reader/multiline/multiline.go @@ -273,6 +273,10 @@ func (mlr *Reader) finalize() reader.Message { mlr.message.AddFlagsWithKey("log.flags", "truncated") } + if mlr.numLines > 1 { + mlr.message.AddFlagsWithKey("log.flags", "multiline") + } + // Copy message from existing content msg := mlr.message diff --git a/filebeat/reader/multiline/multiline_test.go b/filebeat/reader/multiline/multiline_test.go index 1c6ea96c6f38..96dfd6922ccc 100644 --- a/filebeat/reader/multiline/multiline_test.go +++ b/filebeat/reader/multiline/multiline_test.go @@ -232,6 +232,7 @@ func testMultilineTruncated(t *testing.T, cfg Config, events int, truncated bool for _, message := range messages { found := false + multiline := false statusFlags, err := message.Fields.GetValue("log.flags") if err != nil { if !truncated { @@ -247,6 +248,9 @@ func testMultilineTruncated(t *testing.T, cfg Config, events int, truncated bool if f == "truncated" { found = true } + if f == "multiline" { + multiline = true + } } default: t.Fatalf("incorrect type for log.flags") @@ -257,6 +261,7 @@ func testMultilineTruncated(t *testing.T, cfg Config, events int, truncated bool } else { assert.False(t, found) } + assert.True(t, multiline) } } diff --git a/filebeat/tests/system/test_json.py b/filebeat/tests/system/test_json.py index d98b6025aa6e..1d6b0bee8e19 100644 --- a/filebeat/tests/system/test_json.py +++ b/filebeat/tests/system/test_json.py @@ -64,40 +64,6 @@ def test_docker_logs_filtering(self): assert all(o["stream"] == "stdout" for o in output) assert all("windows" not in o["log"] for o in output) - def test_docker_logs_multiline(self): - """ - Should be able to do multiline on docker logs. 
- """ - self.render_config_template( - path=os.path.abspath(self.working_dir) + "/log/*", - json=dict(message_key="log", keys_under_root=True), - multiline=True, - pattern="^\[log\]", - match="after", - negate="true" - ) - - os.mkdir(self.working_dir + "/log/") - self.copy_files(["logs/docker_multiline.log"], - target_dir="log") - - proc = self.start_beat() - self.wait_until( - lambda: self.output_has(lines=3), - max_timeout=10) - - proc.check_kill_and_wait() - - output = self.read_output() - assert len(output) == 3 - - assert all("time" in o for o in output) - assert all("log" in o for o in output) - assert all("message" not in o for o in output) - assert all(o["stream"] == "stdout" for o in output) - assert output[1]["log"] == \ - "[log] This one is\n on multiple\n lines" - def test_simple_json_overwrite(self): """ Should be able to overwrite keys when requested.