chore(kafka-logger): add brokers info to log and update docs (#4908)

This commit is contained in:
okaybase 2021-08-30 13:06:04 +08:00 committed by GitHub
parent f73094bba9
commit f8eac5e725
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 52 additions and 5 deletions

View File

@ -163,7 +163,8 @@ local function send_kafka_data(conf, log_message, prod)
prod, conf.kafka_topic, log_message))
if not ok then
return false, "failed to send data to Kafka topic: " .. err
return false, "failed to send data to Kafka topic: " .. err ..
", brokers: " .. core.json.encode(conf.broker_list)
end
return true

View File

@ -34,9 +34,7 @@ title: kafka-logger
`kafka-logger` is a plugin which works as a Kafka client driver for the ngx_lua nginx module.
This will provide the ability to send Log data requests as JSON objects to external Kafka clusters.
This plugin provides the ability to push Log data as a batch to your external Kafka topics. In case you did not receive the log data, don't worry: give it some time and it will automatically send the logs after the timer function expires in our Batch Processor.
This plugin provides the ability to push request log data as JSON objects to your external Kafka clusters. In case you did not receive the log data, don't worry: give it some time and it will automatically send the logs after the timer function expires in our Batch Processor.
For more information on the Batch Processor in Apache APISIX, please refer to
[Batch-Processor](../batch-processor.md).
@ -169,7 +167,7 @@ curl http://127.0.0.1:9080/apisix/admin/routes/5 -H 'X-API-KEY: edd1c9f034335f13
## Test Plugin
*success:
success:
```shell
$ curl -i http://127.0.0.1:9080/hello

View File

@ -1066,3 +1066,51 @@ property "broker_list" validation failed: failed to validate 127.0.0.1 (matching
property "broker_list" validation failed: failed to validate 127.0.0.1 (matching ".*"): expected 65536 to be smaller than 65535
--- no_error_log
[error]
=== TEST 25: kafka brokers info in log
--- config
location /t {
content_by_lua_block {
local t = require("lib.test_admin").test
local code, body = t('/apisix/admin/routes/1',
ngx.HTTP_PUT,
[[{
"plugins": {
"kafka-logger": {
"broker_list" :
{
"127.0.0.127":9092
},
"kafka_topic" : "test2",
"producer_type": "sync",
"key" : "key1",
"batch_max_size": 1,
"cluster_name": 10
}
},
"upstream": {
"nodes": {
"127.0.0.1:1980": 1
},
"type": "roundrobin"
},
"uri": "/hello"
}]]
)
if code >= 300 then
ngx.status = code
end
ngx.say(body)
local http = require "resty.http"
local httpc = http.new()
local uri = "http://127.0.0.1:" .. ngx.var.server_port .. "/hello"
local res, err = httpc:request_uri(uri, {method = "GET"})
}
}
--- request
GET /t
--- error_log_like eval
qr/create new kafka producer instance, brokers: \[\{"port":9092,"host":"127.0.0.127"}]/
qr/failed to send data to Kafka topic: .*, brokers: \{"127.0.0.127":9092}/