You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Hi Team,
The log file below contains a JSON string; when I try to split it into multiple fields it is not working. I tried the methods below.
Currently I am using the Fluentd 1.16.2 image.
Input log:
{"logId":"xxxxxxxxxxxxxxxxxxxxxxxx","requestUrl":"/api/v1/logservice/logs/test","method":"POST","logType":"ERROR","title":"test","event":"test","data":[{"logId":null,"requestUrl":null,"method":null,"logType":null,"title":null,"event":null,"data":null,"headers":{},"resp":null,"ip":null,"time":0,"clientId":null,"tags":null,"application":null,"logStore":null,"finished":false}],"headers":{"content-length":"2","host":"localhost:30010","content-type":"application/json","user-agent":"curl/8.2.1","accept":"/"},"resp":"success","ip":"127.0.0.1","time":1693282489036,"clientId":null,"tags":["test","test"],"application":null,"logStore":null,"finished":true}
Hibernate: INSERT INTO log_records (log_id, log_index,application,collect_date,collect_count,data_size,original_date,log_store) VALUES (?, ?,?,?,?,?,?,?) ON CONFLICT (application,collect_date,log_store) DO UPDATE SET log_id = EXCLUDED.log_id,log_index = EXCLUDED.log_index,application = EXCLUDED.application,collect_date = EXCLUDED.collect_date,collect_count = log_records.collect_count + 1,data_size = EXCLUDED.data_size,original_date = EXCLUDED.original_date,log_store = EXCLUDED.log_store;
Hibernate: INSERT INTO log_stores (application, log_store) VALUES (?, ?) ON CONFLICT (application,log_store) DO UPDATE SET application = EXCLUDED.application,log_store = EXCLUDED.log_store
I have tried the methods below but they still do not work. Please help.
Method1 @type tail
read_from_head true
tag kubernetes.*
path /var/log/containers/*.log
pos_file /var/log/fluentd-containers.log.pos
exclude_path ["/var/log/containers/fluent*"]
reacted with thumbs up emoji reacted with thumbs down emoji reacted with laugh emoji reacted with hooray emoji reacted with confused emoji reacted with heart emoji reacted with rocket emoji reacted with eyes emoji
-
Hi Team,
The log file below contains a JSON string; when I try to split it into multiple fields it is not working. I tried the methods below.
Currently I am using the Fluentd 1.16.2 image.
Input log:
{"logId":"xxxxxxxxxxxxxxxxxxxxxxxx","requestUrl":"/api/v1/logservice/logs/test","method":"POST","logType":"ERROR","title":"test","event":"test","data":[{"logId":null,"requestUrl":null,"method":null,"logType":null,"title":null,"event":null,"data":null,"headers":{},"resp":null,"ip":null,"time":0,"clientId":null,"tags":null,"application":null,"logStore":null,"finished":false}],"headers":{"content-length":"2","host":"localhost:30010","content-type":"application/json","user-agent":"curl/8.2.1","accept":"/"},"resp":"success","ip":"127.0.0.1","time":1693282489036,"clientId":null,"tags":["test","test"],"application":null,"logStore":null,"finished":true}
Hibernate: INSERT INTO log_records (log_id, log_index,application,collect_date,collect_count,data_size,original_date,log_store) VALUES (?, ?,?,?,?,?,?,?) ON CONFLICT (application,collect_date,log_store) DO UPDATE SET log_id = EXCLUDED.log_id,log_index = EXCLUDED.log_index,application = EXCLUDED.application,collect_date = EXCLUDED.collect_date,collect_count = log_records.collect_count + 1,data_size = EXCLUDED.data_size,original_date = EXCLUDED.original_date,log_store = EXCLUDED.log_store;
Hibernate: INSERT INTO log_stores (application, log_store) VALUES (?, ?) ON CONFLICT (application,log_store) DO UPDATE SET application = EXCLUDED.application,log_store = EXCLUDED.log_store
Expecting Output:
{
"_index": "logstash-2023.08.28",
"_type": "_doc",
"_id": "unique_id_here",
"_version": 1,
"_score": null,
"_source": {
"stream": "stdout",
"character": "F",
"message": "{"logId":"xxxxxxxxxxxxxxxxxxxxxxxx","requestUrl":"/api/v1/logservice/logs/test","method":"POST","logType":"ERROR","title":"test","event":"test","data":[{"logId":null,"requestUrl":null,"method":null,"logType":null,"title":null,"event":null,"data":null,"headers":{},"resp":null,"ip":null,"time":0,"clientId":null,"tags":null,"application":null,"logStore":null,"finished":false}],"headers":{"content-length":"2","host":"localhost:30010","content-type":"application/json","user-agent":"curl/8.2.1","accept":"/"},"resp":"success","ip":"127.0.0.1","time":1693282489036,"clientId":null,"tags":["test","test"],"application":null,"logStore":null,"finished":true}",
"@timestamp": "2023-08-28T16:26:10.764097955+00:00",
"logId": "xxxxxxxxxxxxxxxxxxxxxxxx",
"requestUrl": "/api/v1/logservice/logs/test",
"method": "POST",
"logType": "ERROR",
"title": "test",
"event": "test",
"resp": "success",
"ip": "127.0.0.1",
"time": 1693282489036,
"tags": ["test", "test"]
// ... other fields extracted from the JSON message
},
"fields": {
"@timestamp": [
"2023-08-28T16:26:10.764Z"
]
},
// ... other Elasticsearch-specific metadata
}
I have tried the methods below but they still do not work. Please help.
Method1
@type tail
read_from_head true
tag kubernetes.*
path /var/log/containers/*.log
pos_file /var/log/fluentd-containers.log.pos
exclude_path ["/var/log/containers/fluent*"]
@type regexp
expression ^(?<time>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d+Z)\s(?<stream>[^\s]+)\s(?<character>[^\s])\s(?<message>.*)$
time_format %Y-%m-%dT%H:%M:%S.%NZ
Method2:
@type tail
read_from_head true
tag kubernetes.*
path /var/log/containers/*.log
pos_file /var/log/fluentd-containers.log.pos
exclude_path ["/var/log/containers/fluent*"]
@type regexp
expression ^(?<time>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d+Z)\s(?<stream>[^\s]+)\s(?<character>[^\s])\s(?<message>.*)$
time_format %Y-%m-%dT%H:%M:%S.%NZ
Method3:
@type tail
read_from_head true
tag kubernetes.*
path /var/log/containers/*.log
pos_file /var/log/fluentd-containers.log.pos
exclude_path ["/var/log/containers/fluent*"]
@type regexp
expression ^(?<time>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d+Z)\s(?<stream>[^\s]+)\s(?<character>[^\s]+)\s(?<message>.*)$
time_format %Y-%m-%dT%H:%M:%S.%NZ
<filter kubernetes.**>
@type kubernetes_metadata
@id filter_kube_metadata
kubernetes_url "#{ENV['FLUENT_FILTER_KUBERNETES_URL'] || 'https://' + ENV.fetch('KUBERNETES_SERVICE_HOST') + ':' + ENV.fetch('KUBERNETES_SERVICE_PORT') + '/api'}"
verify_ssl "#{ENV['KUBERNETES_VERIFY_SSL'] || true}"
ca_file "#{ENV['KUBERNETES_CA_FILE']}"
skip_labels "#{ENV['FLUENT_KUBERNETES_METADATA_SKIP_LABELS'] || 'false'}"
skip_container_metadata "#{ENV['FLUENT_KUBERNETES_METADATA_SKIP_CONTAINER_METADATA'] || 'false'}"
skip_master_url "#{ENV['FLUENT_KUBERNETES_METADATA_SKIP_MASTER_URL'] || 'false'}"
skip_namespace_metadata "#{ENV['FLUENT_KUBERNETES_METADATA_SKIP_NAMESPACE_METADATA'] || 'false'}"
<match **>
@type elasticsearch
host elasticsearch.evc-efk.svc.cluster.local
port 9200
logstash_format true
ssl_verify false
# more configurations...
Method4:
Beta Was this translation helpful? Give feedback.
All reactions