I'm trying to extract fields from the following log entry using the Logstash config file and filters below, but the JSON payload is never parsed — the event only contains the raw string captured by the grok pattern instead of the individual JSON fields.
Log:
13:41:37.3921 Info {"message":"CTS execution started","level":"Information","logType":"Default","timeStamp":"2019-12-03T13:41:37.3861868-05:00","fingerprint":"29dad848-4ff7-4d2d-905b-460637f3d534","windowsIdentity":"home","machineName":"L02174400","processName":"CTS","processVersion":"1.0.5","jobId":"5bbc492c-bcb7-451f-b6ac-87d9784ad00d","robotName":"home","machineId":0,"fileName":"SendBackReasons(Autosaved)"}
Config:
# Pipeline: tail UiPath execution logs, split each line into a timestamp/level
# prefix plus a JSON payload, parse the payload, and index into Elasticsearch.
input{
file{
type => "executionlog"
path => ["c:/users/xyj/appdata/local/uipath/logs/*[^W]_execution.log"]
start_position => "beginning"
sincedb_path => "c:/dbfile"
}
}
filter{
grok{
# Captures the trailing JSON text into a field literally named "json-data"
# (with a hyphen) — note this name for the json filter below.
match => { "message" => ["(?<id>[\d\:\.]+)\s%{LOGLEVEL:level} %{GREEDYDATA:json-data}"]
}
}
json{
# BUG: grok produced "json-data" (hyphen) but this reads "json_data"
# (underscore), so the json filter finds no source field and parses nothing.
source => "json_data"
target => "parsed_json"
}
mutate{
add_field => {
# BUG: "[json_data][...]" never exists on the event; once the json filter's
# source is fixed, the parsed object lives under "[parsed_json][...]".
"Info1" => "%{[json_data][message]}" #i tried parsed_json as well here
"level2" => "%{[json_data][level]}"
}
}
}
output{
elasticsearch{
hosts=>["http://localhost:9200"]
index=> "uipathexecutionlog"
}
stdout{}
}
Kibana output: (screenshot omitted — the indexed event shows the unparsed grok capture rather than the individual JSON fields)
The problem is a field-name mismatch: grok captures the payload into a field named `json-data` (with a hyphen), but your `json` filter reads `json_data` (underscore), so nothing is ever parsed — and the `mutate` must then reference the parsed object under `parsed_json`. Try the configuration below:
filter {
  grok {
    # Split "HH:mm:ss.SSSS LEVEL {json}" into an id, a log level, and the raw
    # JSON text. The capture name here must match the json filter's "source"
    # exactly — this mismatch was the original bug.
    match => {
      "message" => ["(?<id>[\d\:\.]+)\s%{LOGLEVEL:level} %{GREEDYDATA:json-data}"]
    }
  }
  json {
    # "source" is the grok capture name ("json-data", hyphen and all).
    source => "json-data"
    target => "parsed_json"
    # Drop the raw JSON string once it has been parsed; remove_field on a
    # filter only runs when the filter succeeds, so a parse failure keeps
    # the original text for debugging.
    remove_field => [ "json-data" ]
  }
  mutate {
    add_field => {
      # The parsed object lives under [parsed_json], so sprintf references
      # must use that prefix.
      "Info1" => "%{[parsed_json][message]}"
      "level2" => "%{[parsed_json][level]}"
    }
  }
}