Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Ecs logs integration #2972

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions packages/ecs_log/changelog.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# newer versions go on top
- version: "0.0.1"
  changes:
    - description: Initial release of the package
      type: enhancement
      # Link to the PR that introduced this change (this package was proposed in #2972).
      link: https://github.com/elastic/integrations/pull/2972
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
{"@timestamp":"2022-04-01T12:09:12.375Z", "log.level": "INFO", "message":"With event.dataset", "event.dataset": "foo"}
{"@timestamp":"2022-04-01T12:09:12.379Z", "log.level": "INFO", "message":"With data_stream.dataset", "data_stream.dataset": "bar"}
{"@timestamp":"2022-04-01T12:09:12.379Z", "log.level": "INFO", "message":"With invalid chars in dataset", "data_stream.dataset": "my-service"}
{"@timestamp":"2022-04-01T14:08:40.199Z", "log.level":"DEBUG", "message":"Without dataset"}
{"@timestamp":"2022-04-01T14:08:40.199Z", "log.level":"DEBUG", "message":"With stack trace", "error.stack_trace": "Exception in thread \"main\" java.lang.NullPointerException\n at com.example.myproject.Book.getTitle(Book.java:16)\n at com.example.myproject.Author.getBookTitles(Author.java:25)\n at com.example.myproject.Bootstrap.main(Bootstrap.java:14)"}
{"@timestamp":"2022-04-01T14:08:40.199Z", "log.level":"DEBUG", "message":"With stack trace as array", "error.stack_trace": [
"Exception in thread \"main\" java.lang.NullPointerException\n",
" at com.example.myproject.Book.getTitle(Book.java:16)\n",
" at com.example.myproject.Author.getBookTitles(Author.java:25)\n",
" at com.example.myproject.Bootstrap.main(Bootstrap.java:14)"]}
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Pipeline test configuration for the ecs_router data stream.
multiline:
  # Each ECS JSON log event begins with an opening brace on a new line.
  first_line_pattern: '^{'
# Fields seeded on every test event before the pipeline runs,
# mirroring what the agent input would set.
fields:
  ecs:
    version: "1.5.0"
  event:
    dataset: ecs_router
  data_stream:
    type: logs
    dataset: ecs_router
    namespace: default
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
{
"expected": [
{
"@timestamp": "2022-04-01T12:09:12.375Z",
"data_stream": {
"dataset": "foo",
"namespace": "default",
"type": "logs"
},
"ecs": {
"version": "1.5.0"
},
"event": {
"dataset": "foo"
},
"log": {
"level": "INFO"
},
"message": "With event.dataset"
},
{
"@timestamp": "2022-04-01T12:09:12.379Z",
"data_stream": {
"dataset": "bar",
"namespace": "default",
"type": "logs"
},
"ecs": {
"version": "1.5.0"
},
"event": {
"dataset": "bar"
},
"log": {
"level": "INFO"
},
"message": "With data_stream.dataset"
},
{
"@timestamp": "2022-04-01T12:09:12.379Z",
"data_stream": {
"dataset": "my_service",
"namespace": "default",
"type": "logs"
},
"ecs": {
"version": "1.5.0"
},
"event": {
"dataset": "my_service"
},
"log": {
"level": "INFO"
},
"message": "With invalid chars in dataset"
},
{
"@timestamp": "2022-04-01T14:08:40.199Z",
"data_stream": {
"dataset": "generic",
"namespace": "default",
"type": "logs"
},
"ecs": {
"version": "1.5.0"
},
"event": {
"dataset": "generic"
},
"log": {
"level": "DEBUG"
},
"message": "Without dataset"
},
{
"@timestamp": "2022-04-01T14:08:40.199Z",
"data_stream": {
"dataset": "generic",
"namespace": "default",
"type": "logs"
},
"ecs": {
"version": "1.5.0"
},
"error": {
"stack_trace": "Exception in thread \"main\" java.lang.NullPointerException\n at com.example.myproject.Book.getTitle(Book.java:16)\n at com.example.myproject.Author.getBookTitles(Author.java:25)\n at com.example.myproject.Bootstrap.main(Bootstrap.java:14)"
},
"event": {
"dataset": "generic"
},
"log": {
"level": "DEBUG"
},
"message": "With stack trace"
},
{
"@timestamp": "2022-04-01T14:08:40.199Z",
"data_stream": {
"dataset": "generic",
"namespace": "default",
"type": "logs"
},
"ecs": {
"version": "1.5.0"
},
"error": {
"stack_trace": "Exception in thread \"main\" java.lang.NullPointerException\n\n at com.example.myproject.Book.getTitle(Book.java:16)\n\n at com.example.myproject.Author.getBookTitles(Author.java:25)\n\n at com.example.myproject.Bootstrap.main(Bootstrap.java:14)"
},
"event": {
"dataset": "generic"
},
"log": {
"level": "DEBUG"
},
"message": "With stack trace as array"
}
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Agent input template (Handlebars): rendered into the logfile input configuration.
# One path entry is emitted per user-configured path.
paths:
{{#each paths}}
- {{this}}
{{/each}}

# Enrich each event with host/cloud/container/orchestrator metadata.
processors:
- add_host_metadata: ~
- add_cloud_metadata: ~
- add_docker_metadata: ~
- add_kubernetes_metadata: ~
# User-supplied YAML from the "custom" variable is appended verbatim.
{{custom}}
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
---
# Router pipeline: parses ECS JSON log lines and redirects each document to
# the data stream named by its dataset, falling back to logs-generic-default.
processors:
  - remove:
      description: |
        This data stream is meant for routing only, we want to avoid that data is written to it.
        We'll use the dataset that is specified in the ECS JSON log message, or use 'generic' as the default.
      field: data_stream.dataset
      ignore_missing: true
  # Drop event.dataset too, so a value parsed from the log line (if any) wins.
  - remove:
      field: event.dataset
      ignore_missing: true
  # Only attempt JSON parsing when the message looks like an ECS JSON object:
  # braces at both ends and an "@timestamp" key somewhere inside.
  - pipeline:
      name: '{{ IngestPipeline "logs-ecs-json-pipeline" }}'
      if: |-
        def message = ctx.message;
        return message != null
        && message.startsWith('{')
        && message.endsWith('}')
        && message.contains('"@timestamp"')
  - set:
      description: Uses event.dataset as a default for data_stream.dataset if the latter is not set.
      field: data_stream.dataset
      copy_from: event.dataset
      # NOTE(review): length() > 1 requires at least two characters -- confirm
      # this is intended rather than length() > 0.
      if: ctx.event?.dataset instanceof String && ctx.event.dataset.length() > 1
      override: false
  # Sanitize dataset: replace characters that are invalid in a dataset name
  # with underscores. The dataset pattern also replaces '-', which is the
  # separator in data stream names (the namespace pattern below does not).
  - script:
      source: |
        ctx.data_stream.dataset = /[\/*?"<>|, #:-]/.matcher(ctx.data_stream.dataset).replaceAll('_')
      if: ctx.data_stream?.dataset != null
  - script:
      source: |
        ctx.data_stream.namespace = /[\/*?"<>|, #:]/.matcher(ctx.data_stream.namespace).replaceAll('_')
      if: ctx.data_stream?.namespace != null
  # Defaults: anything still unset routes to logs-generic-default.
  - set:
      field: data_stream.type
      value: logs
  - set:
      field: data_stream.dataset
      value: generic
      override: false
  - set:
      field: data_stream.namespace
      value: default
      override: false
  # Keep event.dataset consistent with the final routing decision.
  - set:
      field: event.dataset
      copy_from: data_stream.dataset
  # Redirect the document to the computed target data stream.
  - set:
      field: _index
      value: logs-{{{data_stream.dataset}}}-{{{data_stream.namespace}}}
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
---
# Parses an ECS JSON log line from "message" into top-level fields.
processors:
  # Stash the raw line so a parse failure can restore it unchanged.
  - rename:
      field: message
      target_field: _ecs_json_message
      ignore_missing: true
  - json:
      field: _ecs_json_message
      add_to_root: true
      add_to_root_conflict_strategy: merge
      allow_duplicate_keys: true
      if: ctx.containsKey('_ecs_json_message')
      on_failure:
        # On bad JSON: restore the original message and record the error
        # instead of failing the document.
        - rename:
            field: _ecs_json_message
            target_field: message
            ignore_missing: true
        - set:
            field: error.message
            value: Error while parsing JSON
            override: false
  # Drop the stash; on success its contents were merged to the root.
  - remove:
      field: _ecs_json_message
      ignore_missing: true
  # Expand dotted keys from the JSON (e.g. "log.level") into nested objects.
  - dot_expander:
      field: "*"
      override: true
  # error.stack_trace may arrive as an array of lines; flatten to one string.
  - join:
      field: error.stack_trace
      separator: "\n"
      if: ctx.error?.stack_trace instanceof Collection
23 changes: 23 additions & 0 deletions packages/ecs_log/data_stream/ecs_router/fields/base-fields.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# no data is meant to be written to the data stream
# however, in order for package validation and tests to pass, we need to add some fields
- name: data_stream.type
  type: constant_keyword
  description: Data stream type.
- name: data_stream.dataset
  type: constant_keyword
  description: Data stream dataset.
- name: data_stream.namespace
  type: constant_keyword
  description: Data stream namespace.
- name: '@timestamp'
  type: date
  description: Event timestamp.
# only used for tests
- name: ecs.version
  external: ecs
- name: error.stack_trace
  external: ecs
- name: log.level
  external: ecs
- name: message
  external: ecs
21 changes: 21 additions & 0 deletions packages/ecs_log/data_stream/ecs_router/manifest.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
title: ECS Log Router Dataset
type: logs
dataset: ecs_router
streams:
  - input: logfile
    description: Collect your custom log files.
    title: Collect log files
    vars:
      # One or more file paths (globs) to tail.
      - name: paths
        required: true
        title: Log file path
        description: Path to log files to be collected
        type: text
        multi: true
      # Free-form YAML appended to the generated input configuration.
      - name: custom
        title: Custom configurations
        description: >
          Here YAML configuration options can be used to be added to your configuration. Be careful using this as it might break your configuration file.
        type: yaml
        default: ""
3 changes: 3 additions & 0 deletions packages/ecs_log/docs/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# ECS Log Package

The ECS log package ingests log files whose lines are ECS-formatted JSON. Each event is parsed and routed to the data stream named by its `data_stream.dataset` (or `event.dataset`) field, falling back to `logs-generic-default` when no dataset is specified.
4 changes: 4 additions & 0 deletions packages/ecs_log/img/icon.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
24 changes: 24 additions & 0 deletions packages/ecs_log/manifest.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
format_version: 1.0.0
name: ecs_log
title: ECS Logs
description: >-
  Collect ECS logs with Elastic Agent.
type: integration
version: 0.0.1
release: experimental
license: basic
categories:
  - custom
policy_templates:
  - name: logs
    title: ECS logs
    description: Collect your ECS log files.
    inputs:
      - type: logfile
        title: ECS log file
        description: Collect your ECS log files.
icons:
  - src: "/img/icon.svg"
    type: "image/svg+xml"
owner:
  github: elastic/integrations