From 5bf549580304e7d558c2762da3c1f5a9a4af7faf Mon Sep 17 00:00:00 2001 From: spacewander Date: Tue, 10 Jan 2023 11:00:02 +0800 Subject: [PATCH] feat: add custom log format to each logger Signed-off-by: spacewander --- apisix/plugins/clickhouse-logger.lua | 14 +- apisix/plugins/elasticsearch-logger.lua | 15 +- apisix/plugins/file-logger.lua | 13 +- apisix/plugins/google-cloud-logging.lua | 72 +++++---- apisix/plugins/http-logger.lua | 15 +- apisix/plugins/kafka-logger.lua | 14 +- apisix/plugins/loggly.lua | 15 +- apisix/plugins/rocketmq-logger.lua | 14 +- apisix/plugins/skywalking-logger.lua | 14 +- apisix/plugins/sls-logger.lua | 13 +- apisix/plugins/splunk-hec-logging.lua | 35 ++++- apisix/plugins/syslog.lua | 15 +- apisix/plugins/tcp-logger.lua | 17 ++- apisix/plugins/tencent-cloud-cls.lua | 17 +-- apisix/plugins/udp-logger.lua | 17 ++- apisix/stream/plugins/syslog.lua | 9 +- apisix/utils/log-util.lua | 41 ++++++ .../en/latest/plugins/google-cloud-logging.md | 33 +++++ docs/en/latest/plugins/splunk-hec-logging.md | 33 +++++ docs/en/latest/plugins/tcp-logger.md | 33 +++++ docs/en/latest/plugins/udp-logger.md | 33 +++++ .../zh/latest/plugins/google-cloud-logging.md | 32 ++++ docs/zh/latest/plugins/splunk-hec-logging.md | 32 ++++ docs/zh/latest/plugins/tcp-logger.md | 32 ++++ docs/zh/latest/plugins/udp-logger.md | 32 ++++ t/plugin/elasticsearch-logger.t | 8 +- t/plugin/google-cloud-logging2.t | 139 ++++++++++++++++++ t/plugin/splunk-hec-logging.t | 109 ++++++++++++++ t/plugin/tcp-logger.t | 125 ++++++++++++++++ t/plugin/udp-logger.t | 125 ++++++++++++++++ t/stream-plugin/syslog.t | 5 + 31 files changed, 926 insertions(+), 195 deletions(-) diff --git a/apisix/plugins/clickhouse-logger.lua b/apisix/plugins/clickhouse-logger.lua index 70463a752190b..d5628983505e4 100644 --- a/apisix/plugins/clickhouse-logger.lua +++ b/apisix/plugins/clickhouse-logger.lua @@ -20,10 +20,8 @@ local log_util = require("apisix.utils.log-util") local core = require("apisix.core") local http = require("resty.http") local url = require("net.url") -local plugin = require("apisix.plugin") local math_random = math.random -local ngx = ngx local tostring = tostring local plugin_name = "clickhouse-logger" @@ -148,17 +146,7 @@ end function _M.log(conf, ctx) - local metadata = plugin.plugin_metadata(plugin_name) - core.log.info("metadata: ", core.json.delay_encode(metadata)) - local entry - - if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 - then - entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) - else - entry = log_util.get_full_log(ngx, conf) - end + local entry = log_util.get_log_entry(plugin_name, conf, ctx) if batch_processor_manager:add_entry(conf, entry) then return diff --git a/apisix/plugins/elasticsearch-logger.lua b/apisix/plugins/elasticsearch-logger.lua index 5b7341319efc3..e797efdb0a7fe 100644 --- a/apisix/plugins/elasticsearch-logger.lua +++ b/apisix/plugins/elasticsearch-logger.lua @@ -19,7 +19,6 @@ local core = require("apisix.core") local http = require("resty.http") local log_util = require("apisix.utils.log-util") local bp_manager_mod = require("apisix.utils.batch-processor-manager") -local plugin = require("apisix.plugin") local ngx = ngx local str_format = core.string.format @@ -98,19 +97,7 @@ end local function get_logger_entry(conf, ctx) - local entry - local metadata = plugin.plugin_metadata(plugin_name) - core.log.info("metadata: ", core.json.delay_encode(metadata)) - if metadata and metadata.value.log_format - and 
core.table.nkeys(metadata.value.log_format) > 0 - then - entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) - core.log.info("custom log format entry: ", core.json.delay_encode(entry)) - else - entry = log_util.get_full_log(ngx, conf) - core.log.info("full log entry: ", core.json.delay_encode(entry)) - end - + local entry = log_util.get_log_entry(plugin_name, conf, ctx) return core.json.encode({ create = { _index = conf.field.index, diff --git a/apisix/plugins/file-logger.lua b/apisix/plugins/file-logger.lua index 809a2348b3152..076a9e7e75923 100644 --- a/apisix/plugins/file-logger.lua +++ b/apisix/plugins/file-logger.lua @@ -16,7 +16,6 @@ -- local log_util = require("apisix.utils.log-util") local core = require("apisix.core") -local plugin = require("apisix.plugin") local ngx = ngx local io_open = io.open local is_apisix_or, process = pcall(require, "resty.apisix.process") @@ -149,17 +148,7 @@ function _M.body_filter(conf, ctx) end function _M.log(conf, ctx) - local metadata = plugin.plugin_metadata(plugin_name) - local entry - - if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 - then - entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) - else - entry = log_util.get_full_log(ngx, conf) - end - + local entry = log_util.get_log_entry(plugin_name, conf, ctx) write_file_data(conf, entry) end diff --git a/apisix/plugins/google-cloud-logging.lua b/apisix/plugins/google-cloud-logging.lua index a071567167288..67fa82446ff48 100644 --- a/apisix/plugins/google-cloud-logging.lua +++ b/apisix/plugins/google-cloud-logging.lua @@ -16,7 +16,6 @@ -- local core = require("apisix.core") -local ngx = ngx local tostring = tostring local http = require("resty.http") local log_util = require("apisix.utils.log-util") @@ -95,6 +94,13 @@ local schema = { encrypt_fields = {"auth_config.private_key"}, } +local metadata_schema = { + type = "object", + properties = { + log_format = log_util.metadata_schema_log_format, + }, +} + local function send_to_google(oauth, entries) local http_new = http.new() @@ -163,32 +169,39 @@ end local function get_logger_entry(conf, ctx, oauth) - local entry = log_util.get_full_log(ngx, conf) - local google_entry = { - httpRequest = { - requestMethod = entry.request.method, - requestUrl = entry.request.url, - requestSize = entry.request.size, - status = entry.response.status, - responseSize = entry.response.size, - userAgent = entry.request.headers and entry.request.headers["user-agent"], - remoteIp = entry.client_ip, - serverIp = entry.upstream, - latency = tostring(core.string.format("%0.3f", entry.latency / 1000)) .. 
"s" - }, - jsonPayload = { - route_id = entry.route_id, - service_id = entry.service_id, - }, - labels = { - source = "apache-apisix-google-cloud-logging" - }, - timestamp = log_util.get_rfc3339_zulu_timestamp(), - resource = conf.resource, - insertId = ctx.var.request_id, - logName = core.string.format("projects/%s/logs/%s", oauth.project_id, - conf.log_id) + local entry, customized = log_util.get_log_entry(plugin_name, conf, ctx) + local google_entry + if not customized then + google_entry = { + httpRequest = { + requestMethod = entry.request.method, + requestUrl = entry.request.url, + requestSize = entry.request.size, + status = entry.response.status, + responseSize = entry.response.size, + userAgent = entry.request.headers and entry.request.headers["user-agent"], + remoteIp = entry.client_ip, + serverIp = entry.upstream, + latency = tostring(core.string.format("%0.3f", entry.latency / 1000)) .. "s" + }, + jsonPayload = { + route_id = entry.route_id, + service_id = entry.service_id, + }, + } + else + google_entry = { + jsonPayload = entry, + } + end + + google_entry.labels = { + source = "apache-apisix-google-cloud-logging" } + google_entry.timestamp = log_util.get_rfc3339_zulu_timestamp() + google_entry.resource = conf.resource + google_entry.insertId = ctx.var.request_id + google_entry.logName = core.string.format("projects/%s/logs/%s", oauth.project_id, conf.log_id) return google_entry end @@ -198,11 +211,16 @@ local _M = { version = 0.1, priority = 407, name = plugin_name, + metadata_schema = metadata_schema, schema = batch_processor_manager:wrap_schema(schema), } -function _M.check_schema(conf) +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + return core.schema.check(schema, conf) end diff --git a/apisix/plugins/http-logger.lua b/apisix/plugins/http-logger.lua index 93cd8c9bef3ba..2de0995357757 100644 --- a/apisix/plugins/http-logger.lua +++ b/apisix/plugins/http-logger.lua @@ -20,9 +20,7 @@ local log_util = require("apisix.utils.log-util") local core = require("apisix.core") local http = require("resty.http") local url = require("net.url") -local plugin = require("apisix.plugin") -local ngx = ngx local tostring = tostring local ipairs = ipairs @@ -156,18 +154,7 @@ end function _M.log(conf, ctx) - local metadata = plugin.plugin_metadata(plugin_name) - core.log.info("metadata: ", core.json.delay_encode(metadata)) - - local entry - - if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 - then - entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) - else - entry = log_util.get_full_log(ngx, conf) - end + local entry = log_util.get_log_entry(plugin_name, conf, ctx) if not entry.route_id then entry.route_id = "no-matched" diff --git a/apisix/plugins/kafka-logger.lua b/apisix/plugins/kafka-logger.lua index ee8453e2610db..0b22d92e5d31d 100644 --- a/apisix/plugins/kafka-logger.lua +++ b/apisix/plugins/kafka-logger.lua @@ -18,14 +18,12 @@ local core = require("apisix.core") local log_util = require("apisix.utils.log-util") local producer = require ("resty.kafka.producer") local bp_manager_mod = require("apisix.utils.batch-processor-manager") -local plugin = require("apisix.plugin") local math = math local pairs = pairs local type = type local plugin_name = "kafka-logger" local batch_processor_manager = bp_manager_mod.new("kafka logger") -local ngx = ngx local lrucache = core.lrucache.new({ type = "plugin", @@ -220,17 +218,7 @@ 
function _M.log(conf, ctx) -- core.log.info("origin entry: ", entry) else - local metadata = plugin.plugin_metadata(plugin_name) - core.log.info("metadata: ", core.json.delay_encode(metadata)) - if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 - then - entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) - core.log.info("custom log format entry: ", core.json.delay_encode(entry)) - else - entry = log_util.get_full_log(ngx, conf) - core.log.info("full log entry: ", core.json.delay_encode(entry)) - end + entry = log_util.get_log_entry(plugin_name, conf, ctx) end if batch_processor_manager:add_entry(conf, entry) then diff --git a/apisix/plugins/loggly.lua b/apisix/plugins/loggly.lua index 80200394d132c..a276d555dc862 100644 --- a/apisix/plugins/loggly.lua +++ b/apisix/plugins/loggly.lua @@ -175,24 +175,15 @@ end local function generate_log_message(conf, ctx) - local metadata = plugin.plugin_metadata(plugin_name) - local entry - - if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 - then - entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) - else - entry = log_util.get_full_log(ngx, conf) - end - + local entry = log_util.get_log_entry(plugin_name, conf, ctx) local json_str, err = core.json.encode(entry) if not json_str then core.log.error('error occurred while encoding the data: ', err) return nil end - if metadata.value.protocol ~= "syslog" then + local metadata = plugin.plugin_metadata(plugin_name) + if metadata and metadata.value.protocol ~= "syslog" then return json_str end diff --git a/apisix/plugins/rocketmq-logger.lua b/apisix/plugins/rocketmq-logger.lua index 7ca61390cf70a..f1fa54b468cfd 100644 --- a/apisix/plugins/rocketmq-logger.lua +++ b/apisix/plugins/rocketmq-logger.lua @@ -19,12 +19,10 @@ local log_util = require("apisix.utils.log-util") local producer = require ("resty.rocketmq.producer") local acl_rpchook = require("resty.rocketmq.acl_rpchook") local bp_manager_mod = require("apisix.utils.batch-processor-manager") -local plugin = require("apisix.plugin") local type = type local plugin_name = "rocketmq-logger" local batch_processor_manager = bp_manager_mod.new("rocketmq logger") -local ngx = ngx local lrucache = core.lrucache.new({ type = "plugin", @@ -140,17 +138,7 @@ function _M.log(conf, ctx) if conf.meta_format == "origin" then entry = log_util.get_req_original(ctx, conf) else - local metadata = plugin.plugin_metadata(plugin_name) - core.log.info("metadata: ", core.json.delay_encode(metadata)) - if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 - then - entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) - core.log.info("custom log format entry: ", core.json.delay_encode(entry)) - else - entry = log_util.get_full_log(ngx, conf) - core.log.info("full log entry: ", core.json.delay_encode(entry)) - end + entry = log_util.get_log_entry(plugin_name, conf, ctx) end if batch_processor_manager:add_entry(conf, entry) then diff --git a/apisix/plugins/skywalking-logger.lua b/apisix/plugins/skywalking-logger.lua index 7258cb3c26b74..605c8f9adc54e 100644 --- a/apisix/plugins/skywalking-logger.lua +++ b/apisix/plugins/skywalking-logger.lua @@ -20,7 +20,6 @@ local log_util = require("apisix.utils.log-util") local core = require("apisix.core") local http = require("resty.http") local url = require("net.url") -local plugin = require("apisix.plugin") local base64 = require("ngx.base64") local ngx_re = 
require("ngx.re") @@ -115,18 +114,7 @@ end function _M.log(conf, ctx) - local metadata = plugin.plugin_metadata(plugin_name) - core.log.info("metadata: ", core.json.delay_encode(metadata)) - - local log_body - if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 - then - log_body = log_util.get_custom_format_log(ctx, metadata.value.log_format) - else - log_body = log_util.get_full_log(ngx, conf) - end - + local log_body = log_util.get_log_entry(plugin_name, conf, ctx) local trace_context local sw_header = ngx.req.get_headers()["sw8"] if sw_header then diff --git a/apisix/plugins/sls-logger.lua b/apisix/plugins/sls-logger.lua index 89fc952baa565..8cbe009f3f66a 100644 --- a/apisix/plugins/sls-logger.lua +++ b/apisix/plugins/sls-logger.lua @@ -17,7 +17,6 @@ local core = require("apisix.core") local log_util = require("apisix.utils.log-util") local bp_manager_mod = require("apisix.utils.batch-processor-manager") -local plugin = require("apisix.plugin") local plugin_name = "sls-logger" @@ -131,17 +130,7 @@ end -- log phase in APISIX function _M.log(conf, ctx) - local metadata = plugin.plugin_metadata(plugin_name) - local entry - - if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 - then - entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) - else - entry = log_util.get_full_log(ngx, conf) - end - + local entry = log_util.get_log_entry(plugin_name, conf, ctx) local json_str, err = core.json.encode(entry) if not json_str then core.log.error('error occurred while encoding the data: ', err) diff --git a/apisix/plugins/splunk-hec-logging.lua b/apisix/plugins/splunk-hec-logging.lua index 708db63dd558f..5f3902c694e4f 100644 --- a/apisix/plugins/splunk-hec-logging.lua +++ b/apisix/plugins/splunk-hec-logging.lua @@ -60,28 +60,42 @@ local schema = { required = { "endpoint" }, } +local metadata_schema = { + type = "object", + properties = { + log_format = log_util.metadata_schema_log_format, + }, +} local _M = { version = 0.1, priority = 409, name = plugin_name, + metadata_schema = metadata_schema, schema = batch_processor_manager:wrap_schema(schema), } -function _M.check_schema(conf) +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + return core.schema.check(schema, conf) end -local function get_logger_entry(conf) - local entry = log_util.get_full_log(ngx, conf) - return { +local function get_logger_entry(conf, ctx) + local entry, customized = log_util.get_log_entry(plugin_name, conf, ctx) + local splunk_entry = { time = ngx_now(), - host = entry.server.hostname, source = DEFAULT_SPLUNK_HEC_ENTRY_SOURCE, sourcetype = DEFAULT_SPLUNK_HEC_ENTRY_TYPE, - event = { + } + + if not customized then + splunk_entry.host = entry.server.hostname + splunk_entry.event = { request_url = entry.request.url, request_method = entry.request.method, request_headers = entry.request.headers, @@ -93,7 +107,12 @@ local function get_logger_entry(conf) latency = entry.latency, upstream = entry.upstream, } - } + else + splunk_entry.host = core.utils.gethostname() + splunk_entry.event = entry + end + + return splunk_entry end @@ -132,7 +151,7 @@ end function _M.log(conf, ctx) - local entry = get_logger_entry(conf) + local entry = get_logger_entry(conf, ctx) if batch_processor_manager:add_entry(conf, entry) then return diff --git a/apisix/plugins/syslog.lua b/apisix/plugins/syslog.lua index 5dd591f8aac0c..5c0be97376b8c 100644 
--- a/apisix/plugins/syslog.lua +++ b/apisix/plugins/syslog.lua @@ -19,9 +19,7 @@ local core = require("apisix.core") local log_util = require("apisix.utils.log-util") local bp_manager_mod = require("apisix.utils.batch-processor-manager") local syslog = require("apisix.plugins.syslog.init") -local plugin = require("apisix.plugin") local plugin_name = "syslog" -local ngx = ngx local batch_processor_manager = bp_manager_mod.new("sys logger") local schema = { @@ -69,18 +67,7 @@ end function _M.log(conf, ctx) - local metadata = plugin.plugin_metadata(plugin_name) - core.log.info("metadata: ", core.json.delay_encode(metadata)) - - local entry - - if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 - then - entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) - else - entry = log_util.get_full_log(ngx, conf) - end + local entry = log_util.get_log_entry(plugin_name, conf, ctx) syslog.push_entry(conf, ctx, entry) end diff --git a/apisix/plugins/tcp-logger.lua b/apisix/plugins/tcp-logger.lua index 651ab03ba94d0..28e925f5ecd06 100644 --- a/apisix/plugins/tcp-logger.lua +++ b/apisix/plugins/tcp-logger.lua @@ -37,18 +37,31 @@ local schema = { required = {"host", "port"} } +local metadata_schema = { + type = "object", + properties = { + log_format = log_util.metadata_schema_log_format, + }, +} local _M = { version = 0.1, priority = 405, name = plugin_name, + metadata_schema = metadata_schema, schema = batch_processor_manager:wrap_schema(schema), } -function _M.check_schema(conf) + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + return core.schema.check(schema, conf) end + local function send_tcp_data(conf, log_message) local err_msg local res = true @@ -94,7 +107,7 @@ end function _M.log(conf, ctx) - local entry = log_util.get_full_log(ngx, conf) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) if batch_processor_manager:add_entry(conf, entry) then return diff --git a/apisix/plugins/tencent-cloud-cls.lua b/apisix/plugins/tencent-cloud-cls.lua index d9b032b01b35a..8d1f2d001c6bb 100644 --- a/apisix/plugins/tencent-cloud-cls.lua +++ b/apisix/plugins/tencent-cloud-cls.lua @@ -19,9 +19,7 @@ local core = require("apisix.core") local log_util = require("apisix.utils.log-util") local bp_manager_mod = require("apisix.utils.batch-processor-manager") local cls_sdk = require("apisix.plugins.tencent-cloud-cls.cls-sdk") -local plugin = require("apisix.plugin") local math = math -local ngx = ngx local pairs = pairs @@ -102,19 +100,8 @@ function _M.log(conf, ctx) core.log.debug("cls not sampled, skip log") return end - local metadata = plugin.plugin_metadata(plugin_name) - core.log.info("metadata: ", core.json.delay_encode(metadata)) - - local entry - - if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 - then - core.log.debug("using custom format log") - entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) - else - entry = log_util.get_full_log(ngx, conf) - end + + local entry = log_util.get_log_entry(plugin_name, conf, ctx) if conf.global_tag then for k, v in pairs(conf.global_tag) do diff --git a/apisix/plugins/udp-logger.lua b/apisix/plugins/udp-logger.lua index 9962952898793..45bfb58ed16db 100644 --- a/apisix/plugins/udp-logger.lua +++ b/apisix/plugins/udp-logger.lua @@ -35,18 +35,31 @@ local schema = { required = {"host", "port"} } +local metadata_schema = { + type = 
"object", + properties = { + log_format = log_util.metadata_schema_log_format, + }, +} local _M = { version = 0.1, priority = 400, name = plugin_name, + metadata_schema = metadata_schema, schema = batch_processor_manager:wrap_schema(schema), } -function _M.check_schema(conf) + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + return core.schema.check(schema, conf) end + local function send_udp_data(conf, log_message) local err_msg local res = true @@ -80,7 +93,7 @@ end function _M.log(conf, ctx) - local entry = log_util.get_full_log(ngx, conf) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) if batch_processor_manager:add_entry(conf, entry) then return diff --git a/apisix/stream/plugins/syslog.lua b/apisix/stream/plugins/syslog.lua index fcae830337dcc..4f99cab8bf7ac 100644 --- a/apisix/stream/plugins/syslog.lua +++ b/apisix/stream/plugins/syslog.lua @@ -19,7 +19,6 @@ local core = require("apisix.core") local log_util = require("apisix.utils.log-util") local bp_manager_mod = require("apisix.utils.batch-processor-manager") local syslog = require("apisix.plugins.syslog.init") -local plugin = require("apisix.plugin") local plugin_name = "syslog" local batch_processor_manager = bp_manager_mod.new("stream sys logger") @@ -66,15 +65,11 @@ end function _M.log(conf, ctx) - local metadata = plugin.plugin_metadata(plugin_name) - if not metadata or not metadata.value.log_format - or core.table.nkeys(metadata.value.log_format) <= 0 - then - core.log.error("syslog's log_format is not set") + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + if not entry then return end - local entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) syslog.push_entry(conf, ctx, entry) end diff --git a/apisix/utils/log-util.lua b/apisix/utils/log-util.lua index 7200c0cb81a44..c1f38ea2d738d 100644 --- a/apisix/utils/log-util.lua +++ b/apisix/utils/log-util.lua @@ -15,6 +15,7 @@ -- limitations under the License. 
-- local core = require("apisix.core") +local plugin = require("apisix.plugin") local expr = require("resty.expr.v1") local ngx = ngx local pairs = pairs @@ -24,6 +25,7 @@ local str_byte = string.byte local math_floor = math.floor local ngx_update_time = ngx.update_time local req_get_body_data = ngx.req.get_body_data +local is_http = ngx.config.subsystem == "http" local lru_log_format = core.lrucache.new({ ttl = 300, count = 512 @@ -71,9 +73,17 @@ local function get_custom_format_log(ctx, format) end return entry end +-- export the log getter so we can mock in tests _M.get_custom_format_log = get_custom_format_log +-- for test +function _M.inject_get_custom_format_log(f) + get_custom_format_log = f + _M.get_custom_format_log = f +end + + local function latency_details_in_ms(ctx) local latency = (ngx_now() - ngx.req.start_time()) * 1000 local upstream_latency, apisix_latency = nil, latency @@ -193,6 +203,37 @@ end _M.get_full_log = get_full_log +-- for test +function _M.inject_get_full_log(f) + get_full_log = f +end + + +function _M.get_log_entry(plugin_name, conf, ctx) + local metadata = plugin.plugin_metadata(plugin_name) + core.log.info("metadata: ", core.json.delay_encode(metadata)) + + local entry + local customized = false + + if metadata and metadata.value.log_format + and core.table.nkeys(metadata.value.log_format) > 0 + then + customized = true + entry = get_custom_format_log(ctx, metadata.value.log_format) + else + if is_http then + entry = get_full_log(ngx, conf) + else + -- get_full_log doesn't work in stream + core.log.error(plugin_name, "'s log_format is not set") + end + end + + return entry, customized +end + + function _M.get_req_original(ctx, conf) local headers = { ctx.var.request, "\r\n" diff --git a/docs/en/latest/plugins/google-cloud-logging.md b/docs/en/latest/plugins/google-cloud-logging.md index d04d939cad079..c6c954a7ed426 100644 --- a/docs/en/latest/plugins/google-cloud-logging.md +++ b/docs/en/latest/plugins/google-cloud-logging.md @@ -51,6 +51,39 @@ NOTE: `encrypt_fields = {"auth_config.private_key"}` is also defined in the sche This Plugin supports using batch processors to aggregate and process entries (logs/data) in a batch. This avoids the need for frequently submitting the data. The batch processor submits data every `5` seconds or when the data in the queue reaches `1000`. See [Batch Processor](../batch-processor.md#configuration) for more information or setting your custom configuration. +## Metadata + +You can also set the format of the logs by configuring the Plugin metadata. The following configurations are available: + +| Name | Type | Required | Default | Description | +| ---------- | ------ | -------- | ----------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| log_format | object | False | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. | + +:::info IMPORTANT + +Configuring the Plugin metadata is global in scope. This means that it will take effect on all Routes and Services which use the `google-cloud-logging` Plugin. 
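+When `log_format` is set, the custom key value pairs are sent in the entry's `jsonPayload` field in place of the default `httpRequest` layout, as shown in the sample log below.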
+ +::: + +The example below shows how you can configure through the Admin API: + +```shell +curl http://127.0.0.1:9180/apisix/admin/plugin_metadata/google-cloud-logging -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } +}' +``` + +With this configuration, your logs would be formatted as shown below: + +```json +{"partialSuccess":false,"entries":[{"jsonPayload":{"client_ip":"127.0.0.1","host":"localhost","@timestamp":"2023-01-09T14:47:25+08:00","route_id":"1"},"resource":{"type":"global"},"insertId":"942e81f60b9157f0d46bc9f5a8f0cc40","logName":"projects/apisix/logs/apisix.apache.org%2Flogs","timestamp":"2023-01-09T14:47:25+08:00","labels":{"source":"apache-apisix-google-cloud-logging"}}]} +``` + ## Enabling the Plugin ### Full configuration diff --git a/docs/en/latest/plugins/splunk-hec-logging.md b/docs/en/latest/plugins/splunk-hec-logging.md index 3ed9498345b8a..69addb1459ad9 100644 --- a/docs/en/latest/plugins/splunk-hec-logging.md +++ b/docs/en/latest/plugins/splunk-hec-logging.md @@ -46,6 +46,39 @@ When the Plugin is enabled, APISIX will serialize the request context informatio This Plugin supports using batch processors to aggregate and process entries (logs/data) in a batch. This avoids the need for frequently submitting the data. The batch processor submits data every `5` seconds or when the data in the queue reaches `1000`. See [Batch Processor](../batch-processor.md#configuration) for more information or setting your custom configuration. +## Metadata + +You can also set the format of the logs by configuring the Plugin metadata. The following configurations are available: + +| Name | Type | Required | Default | Description | +| ---------- | ------ | -------- | ----------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| log_format | object | False | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. | + +:::info IMPORTANT + +Configuring the Plugin metadata is global in scope. This means that it will take effect on all Routes and Services which use the `splunk-hec-logging` Plugin. 
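+When `log_format` is set, the custom key value pairs are sent as the Splunk `event`, and the `host` field is set to the hostname of the APISIX node, as shown in the sample log below.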
+ +::: + +The example below shows how you can configure through the Admin API: + +```shell +curl http://127.0.0.1:9180/apisix/admin/plugin_metadata/splunk-hec-logging -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } +}' +``` + +With this configuration, your logs would be formatted as shown below: + +```json +[{"time":1673976669.269,"source":"apache-apisix-splunk-hec-logging","event":{"host":"localhost","client_ip":"127.0.0.1","@timestamp":"2023-01-09T14:47:25+08:00","route_id":"1"},"host":"DESKTOP-2022Q8F-wsl","sourcetype":"_json"}] +``` + ## Enabling the Plugin ### Full configuration diff --git a/docs/en/latest/plugins/tcp-logger.md b/docs/en/latest/plugins/tcp-logger.md index 2441fc91bc578..a5a722a80bb38 100644 --- a/docs/en/latest/plugins/tcp-logger.md +++ b/docs/en/latest/plugins/tcp-logger.md @@ -48,6 +48,39 @@ This plugin also allows to push logs as a batch to your external TCP server. It This Plugin supports using batch processors to aggregate and process entries (logs/data) in a batch. This avoids the need for frequently submitting the data. The batch processor submits data every `5` seconds or when the data in the queue reaches `1000`. See [Batch Processor](../batch-processor.md#configuration) for more information or setting your custom configuration. +## Metadata + +You can also set the format of the logs by configuring the Plugin metadata. The following configurations are available: + +| Name | Type | Required | Default | Description | +| ---------- | ------ | -------- | ----------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| log_format | object | False | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. | + +:::info IMPORTANT + +Configuring the Plugin metadata is global in scope. This means that it will take effect on all Routes and Services which use the `tcp-logger` Plugin. + +::: + +The example below shows how you can configure through the Admin API: + +```shell +curl http://127.0.0.1:9180/apisix/admin/plugin_metadata/tcp-logger -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } +}' +``` + +With this configuration, your logs would be formatted as shown below: + +```json +{"@timestamp":"2023-01-09T14:47:25+08:00","route_id":"1","host":"localhost","client_ip":"127.0.0.1"} +``` + ## Enabling the Plugin The example below shows how you can enable the `tcp-logger` Plugin on a specific Route: diff --git a/docs/en/latest/plugins/udp-logger.md b/docs/en/latest/plugins/udp-logger.md index 714ba59ed143a..9ee36ac3c22b8 100644 --- a/docs/en/latest/plugins/udp-logger.md +++ b/docs/en/latest/plugins/udp-logger.md @@ -47,6 +47,39 @@ This plugin also allows to push logs as a batch to your external UDP server. It This Plugin supports using batch processors to aggregate and process entries (logs/data) in a batch. 
This avoids the need for frequently submitting the data. The batch processor submits data every `5` seconds or when the data in the queue reaches `1000`. See [Batch Processor](../batch-processor.md#configuration) for more information or setting your custom configuration. +## Metadata + +You can also set the format of the logs by configuring the Plugin metadata. The following configurations are available: + +| Name | Type | Required | Default | Description | +| ---------- | ------ | -------- | ----------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| log_format | object | False | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. | + +:::info IMPORTANT + +Configuring the Plugin metadata is global in scope. This means that it will take effect on all Routes and Services which use the `udp-logger` Plugin. + +::: + +The example below shows how you can configure through the Admin API: + +```shell +curl http://127.0.0.1:9180/apisix/admin/plugin_metadata/udp-logger -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } +}' +``` + +With this configuration, your logs would be formatted as shown below: + +```json +{"@timestamp":"2023-01-09T14:47:25+08:00","route_id":"1","host":"localhost","client_ip":"127.0.0.1"} +``` + ## Enabling the Plugin The example below shows how you can enable the Plugin on a specific Route: diff --git a/docs/zh/latest/plugins/google-cloud-logging.md b/docs/zh/latest/plugins/google-cloud-logging.md index 7a62b8ac991ba..96f885c7ecd75 100644 --- a/docs/zh/latest/plugins/google-cloud-logging.md +++ b/docs/zh/latest/plugins/google-cloud-logging.md @@ -51,6 +51,38 @@ description: API 网关 Apache APISIX 的 google-cloud-logging 插件可用于 该插件支持使用批处理器来聚合并批量处理条目(日志和数据)。这样可以避免该插件频繁地提交数据。默认情况下每 `5` 秒钟或队列中的数据达到 `1000` 条时,批处理器会自动提交数据,如需了解更多信息或自定义配置,请参考 [Batch Processor](../batch-processor.md#配置)。 +## 插件元数据 + +| 名称 | 类型 | 必选项 | 默认值 | 有效值 | 描述 | +| ---------------- | ------- | ------ | ------------- | ------- | ------------------------------------------------ | +| log_format | object | 否 | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头。则表明获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 | + +:::info 注意 + +该设置全局生效。如果指定了 `log_format`,则所有绑定 `google-cloud-logging` 的路由或服务都将使用该日志格式。 + +::: + +以下示例展示了如何通过 Admin API 配置插件元数据: + +```shell +curl http://127.0.0.1:9180/apisix/admin/plugin_metadata/google-cloud-logging \ +-H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } +}' +``` + +配置完成后,你将在日志系统中看到如下类似日志: + +```json 
+{"partialSuccess":false,"entries":[{"jsonPayload":{"client_ip":"127.0.0.1","host":"localhost","@timestamp":"2023-01-09T14:47:25+08:00","route_id":"1"},"resource":{"type":"global"},"insertId":"942e81f60b9157f0d46bc9f5a8f0cc40","logName":"projects/apisix/logs/apisix.apache.org%2Flogs","timestamp":"2023-01-09T14:47:25+08:00","labels":{"source":"apache-apisix-google-cloud-logging"}}]} +``` + ## 启用插件 以下示例展示了如何在指定路由上启用该插件: diff --git a/docs/zh/latest/plugins/splunk-hec-logging.md b/docs/zh/latest/plugins/splunk-hec-logging.md index a95e528643664..48cf62c94e699 100644 --- a/docs/zh/latest/plugins/splunk-hec-logging.md +++ b/docs/zh/latest/plugins/splunk-hec-logging.md @@ -47,6 +47,38 @@ description: API 网关 Apache APISIX 的 splunk-hec-logging 插件可用于将 本插件支持使用批处理器来聚合并批量处理条目(日志和数据)。这样可以避免该插件频繁地提交数据。默认情况下每 `5` 秒钟或队列中的数据达到 `1000` 条时,批处理器会自动提交数据,如需了解更多信息或自定义配置,请参考 [Batch-Processor](../batch-processor.md#配置)。 +## 插件元数据 + +| 名称 | 类型 | 必选项 | 默认值 | 有效值 | 描述 | +| ---------------- | ------- | ------ | ------------- | ------- | ------------------------------------------------ | +| log_format | object | 否 | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头。则表明获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 | + +:::info 注意 + +该设置全局生效。如果指定了 `log_format`,则所有绑定 `splunk-hec-logging` 的路由或服务都将使用该日志格式。 + +::: + +以下示例展示了如何通过 Admin API 配置插件元数据: + +```shell +curl http://127.0.0.1:9180/apisix/admin/plugin_metadata/splunk-hec-logging \ +-H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } +}' +``` + +配置完成后,你将在日志系统中看到如下类似日志: + +```json +[{"time":1673976669.269,"source":"apache-apisix-splunk-hec-logging","event":{"host":"localhost","client_ip":"127.0.0.1","@timestamp":"2023-01-09T14:47:25+08:00","route_id":"1"},"host":"DESKTOP-2022Q8F-wsl","sourcetype":"_json"}] +``` + ## 启用插件 以下示例展示了如何在指定路由上启用该插件: diff --git a/docs/zh/latest/plugins/tcp-logger.md b/docs/zh/latest/plugins/tcp-logger.md index 866f37ce5812f..e3ba924ad5582 100644 --- a/docs/zh/latest/plugins/tcp-logger.md +++ b/docs/zh/latest/plugins/tcp-logger.md @@ -46,6 +46,38 @@ description: 本文介绍了 API 网关 Apache APISIX 如何使用 tcp-logger 该插件支持使用批处理器来聚合并批量处理条目(日志/数据)。这样可以避免插件频繁地提交数据,默认情况下批处理器每 `5` 秒钟或队列中的数据达到 `1000` 条时提交数据,如需了解批处理器相关参数设置,请参考 [Batch-Processor](../batch-processor.md#配置)。 +## 插件元数据 + +| 名称 | 类型 | 必选项 | 默认值 | 有效值 | 描述 | +| ---------------- | ------- | ------ | ------------- | ------- | ------------------------------------------------ | +| log_format | object | 否 | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头。则表明获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 | + +:::info 注意 + +该设置全局生效。如果指定了 `log_format`,则所有绑定 `tcp-logger` 的路由或服务都将使用该日志格式。 + +::: + +以下示例展示了如何通过 Admin API 配置插件元数据: + +```shell +curl http://127.0.0.1:9180/apisix/admin/plugin_metadata/tcp-logger \ +-H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } +}' +``` + +配置完成后,你将在日志系统中看到如下类似日志: + +```json +{"@timestamp":"2023-01-09T14:47:25+08:00","route_id":"1","host":"localhost","client_ip":"127.0.0.1"} +``` + ## 启用插件 你可以通过以下命令在指定路由中启用该插件: diff --git a/docs/zh/latest/plugins/udp-logger.md b/docs/zh/latest/plugins/udp-logger.md index 
1553b16d9c69a..797967ed59eb8 100644 --- a/docs/zh/latest/plugins/udp-logger.md +++ b/docs/zh/latest/plugins/udp-logger.md @@ -45,6 +45,38 @@ description: 本文介绍了 API 网关 Apache APISIX 如何使用 udp-logger 该插件支持使用批处理器来聚合并批量处理条目(日志和数据)。这样可以避免插件频繁地提交数据,默认情况下批处理器每 `5` 秒钟或队列中的数据达到 `1000` 条时提交数据,如需了解批处理器相关参数设置,请参考 [Batch-Processor](../batch-processor.md#配置)。 +## 插件元数据 + +| 名称 | 类型 | 必选项 | 默认值 | 有效值 | 描述 | +| ---------------- | ------- | ------ | ------------- | ------- | ------------------------------------------------ | +| log_format | object | 否 | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头。则表明获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 | + +:::info 注意 + +该设置全局生效。如果指定了 `log_format`,则所有绑定 `udp-logger` 的路由或服务都将使用该日志格式。 + +::: + +以下示例展示了如何通过 Admin API 配置插件元数据: + +```shell +curl http://127.0.0.1:9180/apisix/admin/plugin_metadata/udp-logger \ +-H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } +}' +``` + +配置完成后,你将在日志系统中看到如下类似日志: + +```json +{"@timestamp":"2023-01-09T14:47:25+08:00","route_id":"1","host":"localhost","client_ip":"127.0.0.1"} +``` + ## 如何开启 你可以通过如下命令在指定路由上启用 `udp-logger` 插件: diff --git a/t/plugin/elasticsearch-logger.t b/t/plugin/elasticsearch-logger.t index 11b85e14b9fb5..623dbcf4444a5 100644 --- a/t/plugin/elasticsearch-logger.t +++ b/t/plugin/elasticsearch-logger.t @@ -158,11 +158,11 @@ passed local http = require("resty.http") local ngx_re = require("ngx.re") local log_util = require("apisix.utils.log-util") - log_util.get_full_log = function(ngx, conf) + log_util.inject_get_full_log(function(ngx, conf) return { test = "test" } - end + end) http.request_uri = function(self, uri, params) if not params.body or type(params.body) ~= "string" then @@ -409,11 +409,11 @@ passed local http = require("resty.http") local ngx_re = require("ngx.re") local log_util = require("apisix.utils.log-util") - log_util.get_custom_format_log = function(ctx, format) + log_util.inject_get_custom_format_log(function(ctx, format) return { test = "test" } - end + end) http.request_uri = function(self, uri, params) if not params.body or type(params.body) ~= "string" then diff --git a/t/plugin/google-cloud-logging2.t b/t/plugin/google-cloud-logging2.t index 6fbe6b350eef0..0b8be32dfffa9 100644 --- a/t/plugin/google-cloud-logging2.t +++ b/t/plugin/google-cloud-logging2.t @@ -193,3 +193,142 @@ Yp8D0aqsLEgwGrJQER26FPpKmyIwvcL+nm6q5W31PnU9AOC/WEkB6Zs58hsMzD2S kEJQcmfVew5mFXyxuEn3zA== -----END PRIVATE KEY----- 
YnwwDKc5vNzo0OU4StTRQbwgCnTZ3dmYiBFm8aGnvTxlE86D2nT07Q3BWhUdky6OGIox4MRLbiHz13NZjyUao/Nudh4PeTj5wMldPD5YvNWtbTG4ig/TNSdBncmIQPLPaUqSweE61pnASxodpTlBJ5k9yxfTmwBTOkzZevoKy9D2E4wF9vGCdkcPK/tAkvRoJTj6xD3xVuAbkcap/81oHplUZZ+ghlEnBZgOH8UMa73UfeNbOQVHD2mlU0LxkTXtwFhHWl50adrt890VDHev0+FUUDjv5Ysl8r/nnnlyq3SV4oqJfs/IVRKROe93e8sJ2/49o7kEv2XT1/6DjM/VsSLKfAi5rLNobcSaSzztSSLkrBFKQvvy2rRA7GFWKbIk+rPZhYmTMItDJv23XP6uzaLRPoq2f/AnRTKpWmA8Dk9TfFHsZLupKi1bmjCdtK8lpMCf9Au1rezt7+2BybQrtbbDbwPzC5bKHmKhc0GPTUzLAWQBin3tuZxSfk/MqRtG+AemwnFTHivJrfRwmc3db+b9W6WX09mV488f2M4qbqBmkiFU5VARWCGZ5vbop2KGhmB2fQPXTmj8QSYk6fBxFDnfzTfnYMIu2cQsbSBPCnoPinQNpBfFD3RQkkCiNtJ8GA8DWsivWsnW4jWyPmkIN/P1eLW1DSsU6V4cbhTQJs6/LzOCGAZB/ewu3mr1SDLWJPlIWW6atC/g0uiXkZ3VLUsS0BQffITf8sVXyz/BEbflLlT777zERDKyz/qS2JyR6U8s2h3Yg+GncPUCEF6Lx5Veb1lL+zs+Stvv+5/t2GfDlNYiwTU8HeffhEGgAv1s86OPo3CfWe7lEnu/MFHIm0czVenYdEVy449xj66DHqXUQVzVc+3NelW15FrKhcvU0Cxwqfk+xEOE185ssD06L+tOGjxPPvADjlcQQ1crH+tEcTTLnZZ/e16I10kcc5rBJwDy4COoeY6DZ0dFwtAdbjoR/KaSTGLK6n/u9Ow7OGDPZog4LhrzMOn2T7hk/oaMOKhlDvKroiSijhhkrQf5ZDhhh3GQn/ZRXjyiPWBqKEQiBJGyZ/iRONzJLsF8U8vsBzBToxmTe9prlwHusgAEIBUFrZRSvsVgsPCFOyJ6XJXDTdcCInHUGI9LsxWdlojYvvNuSvavkw1I4K+VBmlEG5FCMx56eX2X49hfXwRcM1ZyRRrmq6cRh+33aMeMLAtpKgTsQgmB/I01mGNZlstvU0XEFnCPuWcks50BTnvPEbU7GZJLE3HFmGb3vyC57E8oTR2FjhDrevPlLkxMPrLvXhwbmV+3YiZYq+8k6oBKfrrq41JRKr+SJDb7m6xL8AuZccMrNhDrkByQLi6zn95dIYc3+vNU4XBzvhpb7HMj2wvorxEW2HpQ+OVSZiZSCU6m4Fx6juj1D5pGs1nr68ybihqMrXuZBKP4b9Y6sw99kNmnWBdwNiY95sWy1qUe0MJq3r44hhVHvCUmzOVyO4aBmhMwgkaSQWpEeQwyIWENM1IMU6WUrKCSuLuKJAl5bM++ThBaLvIIMCyXl39136jHp97aVmHRXbSMPcSAb8l/YQ6SLK0HBxmFTXvroxHmPxPqrJ5jz65C72+uArgOZxJN6tyimIcTMyoJoN7N+QKxDLjgmqnJyEcthycEK3gikyloWsLppzEmHLHBDXlKpJLflvUujYrNsKf2xohx31gIlxBWCHP/1KL3QAehn+FEWUWsXn2hWAR0KAtmIOM7gZuCY8yKNDfXrAZJs14rwDlTbnhJvyijt1Tr6gleehmJDKSm2vM/NbznVTKwJDyMRner+vvc4zD06az/Y6Y4oM0e0IWM2fMaiiwjNAaKhhwJzqvM1c8+ZOfuRajmHFECEkYgXCKZiQxQihFG2wWp2i+xEGGwP2e+FbDdY9Ygyvw5SUvahyoX36AYbbTBOFY6E9aYUIM/Et8ZuXoWs1QaxGfJwcVvueqke45y3GKkp54sHXhrqfKX0TTiw6DCUs6dRTybxOjmjJCKp6Yw4KGWY0t3J0xbK08KTUMeHNxgtfYcz1/Wg/Q61CkUJkRNBninAAkEz8rV2olBHy1GZFFjCQySAyPH4PtWm1S4sBzdsui5wT+m2pC/DsCcQW++TGH9LdaHeT8B9u32lYToVN1/L2j5kjkhN13sNKfb6I9yYTnUqweQFU79toBfDt+6KNNfIA1TcmvZw8RcuMOArEqJQ6OPOhgUQBwsZaGeqFmAE4q64n5raS4OCdWtasFtItW3c5QHxkKoEEER04glVsCoxOvc80U= + + + +=== TEST 3: set route to test custom log format +--- config + location /t { + content_by_lua_block { + local config = { + uri = "/hello", + upstream = { + type = "roundrobin", + nodes = { + ["127.0.0.1:1980"] = 1 + } + }, + plugins = { + ["google-cloud-logging"] = { + auth_config = { + private_key = [[ +-----BEGIN PRIVATE KEY----- +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDDzrFwnA3EvYyR +aeMgaLD3hBjvxKrz10uox1X8q7YYhf2ViRtLRUMa2bEMYksE5hbhwpNf6mKAnLOC +UuAT6cPPdUl/agKpJXviBPIR2LuzD17WsLJHp1HxUDssSkgfCaGcOGGNfLUhhIpF +2JUctLmxiZoAZySlSjcwupSuDJ0aPm0XO8r9H8Qu5kF2Vkz5e5bFivLTmvzrQTe4 +v5V1UI6hThElCSeUmdNF3uG3wopxlvq4zXgLTnuLbrNf/Gc4mlpV+UDgTISj32Ep +AB2vxKEbvQw4ti8YJnGXWjxLerhfrszFw+V8lpeduiDYA44ZFoVqvzxeIsVZNtcw +Iu7PvEPNAgMBAAECggEAVpyN9m7A1F631/aLheFpLgMbeKt4puV7zQtnaJ2XrZ9P +PR7pmNDpTu4uF3k/D8qrIm+L+uhVa+hkquf3wDct6w1JVnfQ93riImbnoKdK13ic +DcEZCwLjByfjFMNCxZ/gAZca55fbExlqhFy6EHmMjhB8s2LsXcTHRuGxNI/Vyi49 +sxECibe0U53aqdJbVWrphIS67cpwl4TUkN6mrHsNuDYNJ9dgkpapoqp4FTFQsBqC +afOK5qgJ68dWZ47FBUng+AZjdCncqAIuJxxItGVQP6YPsFs+OXcivIVHJr363TpC +l85FfdvqWV5OGBbwSKhNwiTNUVvfSQVmtURGWG/HbQKBgQD4gZ1z9+Lx19kT9WTz +lw93lxso++uhAPDTKviyWSRoEe5aN3LCd4My+/Aj+sk4ON/s2BV3ska5Im93j+vC +rCv3uPn1n2jUhWuJ3bDqipeTW4n/CQA2m/8vd26TMk22yOkkqw2MIA8sjJ//SD7g 
+tdG7up6DgGMP4hgbO89uGU7DAwKBgQDJtkKd0grh3u52Foeh9YaiAgYRwc65IE16 +UyD1OJxIuX/dYQDLlo5KyyngFa1ZhWIs7qC7r3xXH+10kfJY+Q+5YMjmZjlL8SR1 +Ujqd02R9F2//6OeswyReachJZbZdtiEw3lPa4jVFYfhSe0M2ZPxMwvoXb25eyCNI +1lYjSKq87wKBgHnLTNghjeDp4UKe6rNYPgRm0rDrhziJtX5JeUov1mALKb6dnmkh +GfRK9g8sQqKDfXwfC6Z2gaMK9YaryujGaWYoCpoPXtmJ6oLPXH4XHuLh4mhUiP46 +xn8FEfSimuQS4/FMxH8A128GHQSI7AhGFFzlwfrBWcvXC+mNDsTvMmLxAoGARc+4 +upppfccETQZ7JsitMgD1TMwA2f2eEwoWTAitvlXFNT9PYSbYVHaAJbga6PLLCbYF +FzAjHpxEOKYSdEyu7n/ayDL0/Z2V+qzc8KarDsg/0RgwppBbU/nUgeKb/U79qcYo +y4ai3UKNCS70Ei1dTMvmdpnwXwlxfNIBufB6dy0CgYBMYq9Lc31GkC6PcGEEbx6W +vjImOadWZbuOVnvEQjb5XCdcOsWsMcg96PtoeuyyHmhnEF1GsMzcIdQv/PHrvYpK +Yp8D0aqsLEgwGrJQER26FPpKmyIwvcL+nm6q5W31PnU9AOC/WEkB6Zs58hsMzD2S +kEJQcmfVew5mFXyxuEn3zA== +-----END PRIVATE KEY-----]], + project_id = "apisix", + token_uri = "http://127.0.0.1:1980/google/logging/token", + scopes = { + "https://apisix.apache.org/logs:admin" + }, + entries_uri = "http://127.0.0.1:1980/google/logging/entries", + }, + inactive_timeout = 1, + batch_max_size = 1, + } + } + } + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, config) + + if code >= 300 then + ngx.status = code + ngx.say(body) + return + end + + local code, body = t('/apisix/admin/plugin_metadata/google-cloud-logging', + ngx.HTTP_PUT, + [[{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } + }]] + ) + if code >= 300 then + ngx.status = code + ngx.say(body) + return + end + ngx.say(body) + } + } +--- response_body +passed + + + +=== TEST 4: hit +--- extra_init_by_lua + local decode = require("toolkit.json").decode + local up = require("lib.server") + up.google_logging_entries = function() + ngx.log(ngx.WARN, "the mock backend is hit") + + ngx.req.read_body() + local data = ngx.req.get_body_data() + data = decode(data) + assert(data.entries[1].jsonPayload.client_ip == "127.0.0.1") + assert(data.entries[1].resource.type == "global") + ngx.say('{}') + end +--- request +GET /hello +--- wait: 2 +--- response_body +hello world +--- error_log +the mock backend is hit +--- no_error_log +[error] + + + +=== TEST 5: bad custom log format +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/plugin_metadata/google-cloud-logging', + ngx.HTTP_PUT, + [[{ + "log_format": "'$host' '$time_iso8601'" + }]] + ) + if code >= 300 then + ngx.status = code + ngx.print(body) + return + end + ngx.say(body) + } + } +--- error_code: 400 +--- response_body +{"error_msg":"invalid configuration: property \"log_format\" validation failed: wrong type: expected object, got string"} diff --git a/t/plugin/splunk-hec-logging.t b/t/plugin/splunk-hec-logging.t index 4e4b0f9351222..afd462a37b75f 100644 --- a/t/plugin/splunk-hec-logging.t +++ b/t/plugin/splunk-hec-logging.t @@ -192,3 +192,112 @@ GET /hello --- wait: 2 --- response_body hello world + + + +=== TEST 6: bad custom log format +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/plugin_metadata/splunk-hec-logging', + ngx.HTTP_PUT, + [[{ + "log_format": "'$host' '$time_iso8601'" + }]] + ) + if code >= 300 then + ngx.status = code + ngx.print(body) + return + end + ngx.say(body) + } + } +--- error_code: 400 +--- response_body +{"error_msg":"invalid configuration: property \"log_format\" validation failed: wrong type: expected object, got string"} + + + +=== TEST 7: set route to 
test custom log format +--- config + location /t { + content_by_lua_block { + local config = { + uri = "/hello", + upstream = { + type = "roundrobin", + nodes = { + ["127.0.0.1:1980"] = 1 + } + }, + plugins = { + ["splunk-hec-logging"] = { + endpoint = { + uri = "http://127.0.0.1:1980/splunk_hec_logging", + token = "BD274822-96AA-4DA6-90EC-18940FB2414C" + }, + batch_max_size = 1, + inactive_timeout = 1 + } + } + } + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, config) + + if code >= 300 then + ngx.status = code + ngx.say(body) + return + end + + local code, body = t('/apisix/admin/plugin_metadata/splunk-hec-logging', + ngx.HTTP_PUT, + [[{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } + }]] + ) + if code >= 300 then + ngx.status = code + ngx.say(body) + return + end + ngx.say(body) + } + } +--- response_body +passed + + + +=== TEST 8: hit +--- extra_init_by_lua + local core = require("apisix.core") + local decode = require("toolkit.json").decode + local up = require("lib.server") + up.splunk_hec_logging = function() + ngx.log(ngx.WARN, "the mock backend is hit") + + ngx.req.read_body() + local data = ngx.req.get_body_data() + ngx.log(ngx.WARN, data) + data = decode(data) + assert(data[1].event.client_ip == "127.0.0.1") + assert(data[1].source == "apache-apisix-splunk-hec-logging") + assert(data[1].host == core.utils.gethostname()) + ngx.say('{}') + end +--- request +GET /hello +--- wait: 2 +--- response_body +hello world +--- error_log +the mock backend is hit +--- no_error_log +[error] diff --git a/t/plugin/tcp-logger.t b/t/plugin/tcp-logger.t index 4683adbc4fb84..f4ee6682305f3 100644 --- a/t/plugin/tcp-logger.t +++ b/t/plugin/tcp-logger.t @@ -271,3 +271,128 @@ qr/sending a batch logs to 127.0.0.1:(\d+)/ --- grep_error_log_out sending a batch logs to 127.0.0.1:5044 sending a batch logs to 127.0.0.1:5045 + + + +=== TEST 8: bad custom log format +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/plugin_metadata/tcp-logger', + ngx.HTTP_PUT, + [[{ + "log_format": "'$host' '$time_iso8601'" + }]] + ) + if code >= 300 then + ngx.status = code + ngx.print(body) + return + end + ngx.say(body) + } + } +--- request +GET /t +--- error_code: 400 +--- response_body +{"error_msg":"invalid configuration: property \"log_format\" validation failed: wrong type: expected object, got string"} + + + +=== TEST 9: add plugin +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "tcp-logger": { + "host": "127.0.0.1", + "port": 8125, + "tls": false, + "batch_max_size": 1, + "inactive_timeout": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/hello" + }]] + ) + + if code >= 300 then + ngx.status = code + ngx.say(body) + return + end + + local code, body = t('/apisix/admin/plugin_metadata/tcp-logger', + ngx.HTTP_PUT, + [[{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } + }]] + ) + if code >= 300 then + ngx.status = code + ngx.say(body) + return + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 10: access +--- stream_conf_enable +--- extra_stream_config + server { + listen 8125; + content_by_lua_block { + local decode = 
require("toolkit.json").decode + ngx.log(ngx.WARN, "the mock backend is hit") + + local sock, err = ngx.req.socket(true) + if not sock then + ngx.log(ngx.ERR, "failed to get the request socket: ", err) + return + end + + local data, err = sock:receive('*a') + + if not data then + if err and err ~= "closed" then + ngx.log(ngx.ERR, "socket error, returning: ", err) + end + return + end + + data = decode(data) + assert(data.client_ip == "127.0.0.1") + } + } +--- request +GET /hello +--- response_body +hello world +--- wait: 2 +--- error_log +the mock backend is hit +--- no_error_log +[error] diff --git a/t/plugin/udp-logger.t b/t/plugin/udp-logger.t index 2ca0f187222e3..7f660bb6c1da2 100644 --- a/t/plugin/udp-logger.t +++ b/t/plugin/udp-logger.t @@ -268,3 +268,128 @@ qr/sending a batch logs to 127.0.0.1:(\d+)/ --- grep_error_log_out sending a batch logs to 127.0.0.1:2000 sending a batch logs to 127.0.0.1:2001 + + + +=== TEST 8: bad custom log format +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/plugin_metadata/udp-logger', + ngx.HTTP_PUT, + [[{ + "log_format": "'$host' '$time_iso8601'" + }]] + ) + if code >= 300 then + ngx.status = code + ngx.print(body) + return + end + ngx.say(body) + } + } +--- request +GET /t +--- error_code: 400 +--- response_body +{"error_msg":"invalid configuration: property \"log_format\" validation failed: wrong type: expected object, got string"} + + + +=== TEST 9: add plugin +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "udp-logger": { + "host": "127.0.0.1", + "port": 8125, + "tls": false, + "batch_max_size": 1, + "inactive_timeout": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/hello" + }]] + ) + + if code >= 300 then + ngx.status = code + ngx.say(body) + return + end + + local code, body = t('/apisix/admin/plugin_metadata/udp-logger', + ngx.HTTP_PUT, + [[{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } + }]] + ) + if code >= 300 then + ngx.status = code + ngx.say(body) + return + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 10: access +--- stream_conf_enable +--- extra_stream_config + server { + listen 8125 udp; + content_by_lua_block { + local decode = require("toolkit.json").decode + ngx.log(ngx.WARN, "the mock backend is hit") + + local sock, err = ngx.req.socket(true) + if not sock then + ngx.log(ngx.ERR, "failed to get the request socket: ", err) + return + end + + local data, err = sock:receive() + + if not data then + if err and err ~= "no more data" then + ngx.log(ngx.ERR, "socket error, returning: ", err) + end + return + end + + data = decode(data) + assert(data.client_ip == "127.0.0.1") + } + } +--- request +GET /hello +--- response_body +hello world +--- wait: 2 +--- error_log +the mock backend is hit +--- no_error_log +[error] diff --git a/t/stream-plugin/syslog.t b/t/stream-plugin/syslog.t index c6d96c95efcd9..d185f68bfb59d 100644 --- a/t/stream-plugin/syslog.t +++ b/t/stream-plugin/syslog.t @@ -52,6 +52,11 @@ __DATA__ location /t { content_by_lua_block { local t = require("lib.test_admin").test + -- ensure the format is not set + t('/apisix/admin/plugin_metadata/syslog', + ngx.HTTP_DELETE + ) + local code, body = t('/apisix/admin/upstreams/1', ngx.HTTP_PUT, [[{