
JSON file from Filebeat to Logstash and then to Elasticsearch

I am trying to ingest inventory data that is generated in the following JSON file format.

{  
   "_meta":{  
      "hostvars":{  
         "host1":{  
            "foreman":{  
               "architecture_id":1,
               "architecture_name":"x86_64",
               "capabilities":[  
                  "build"
               ],
               "certname":"host1",
               "comment":"this is hostname1",
               "created_at":"2017-03-08T15:27:11Z",
               "disk":"10gb",
               "domain_id":5,

            },
            "foreman_facts":{  
               "boardmanufacturer":"Intel Corporation",
               "boardproductname":"440BX Desktop Reference Platform",
               "ipaddress":"1.1.1.1",
               "ipaddress_eth0":"1.1.1.2",
               "ipaddress_lo":"127.0.0.1",

            },
            "foreman_params":{  

            }
         },
         "host2":{  
            "foreman":{  
               "architecture_id":1,
               "architecture_name":"x86_64",
               "capabilities":[  
                  "build"
               ],
               "certname":"host2",
               "comment":"this hostname2",
               "created_at":"2017-03-08T15:27:11Z",
               "disk":"20gb",
               "domain_id":5,

            },
            "foreman_facts":{  
               "boardmanufacturer":"Intel Corporation",
               "boardproductname":"440BX Desktop Reference Platform",
               "ipaddress":"2.1.1.1",
               "ipaddress_eth0":"2.2.2.2",
               "ipaddress_lo":"127.0.0.1",

            },
            "foreman_params":{  

            }
         },
         "foreman_all":[  
            "host3",
            "host4",

         ],
         "foreman_environment: [ 
         "computer1",
         "computer2"
      ],

I managed to get the data into Elasticsearch using the following configuration.

Filebeat config:

multiline.pattern: '^{'

multiline.negate: true

multiline.match: after

output.logstash:
  # The Logstash hosts
  hosts: ["localhost:5044"]

Logstash:

input {
  beats {
    port => "5044"
  }
}

output {
  elasticsearch {
    hosts => [ "10.1.7.5:9200" ]
    index => "inventory-%{+YYYY-MM-dd}"
  }
  stdout {}
}

However, I noticed that Filebeat treats the whole JSON file as one message. I would like to know whether I can break that message up, send only the hostvars section, and index one document per hostname, ignoring the foreman_all and foreman_environment fields in the JSON above. The data above is only a sample; I have to ingest around 100,000 records, so I want to send as little data over the network as possible.

I would like to ingest the data into Elasticsearch in the following format. Can anyone suggest the best configuration to achieve this?

Elastic document ID 1

"computer name": "host1",
"architecture_id": 1,
"architecture_name": "x86_64",
"capabilities": ["build"],
"Company hardware name": "host1",
"comment": "this is hostname1",
"created_at": "2017-03-08T15:27:11Z",
"disk": "10gb",
"domain_id": 5,
"foreman_facts": {
"boardmanufacturer": "Intel Corporation",
"boardproductname": "440BX Desktop Reference Platform",
"ipaddress": "1.1.1.1",
"ipaddress_eth0": "1.1.1.2",
"ipaddress_lo": "127.0.0.1",

Elastic document ID 2

"computer name"" : "host2"
"architecture_id": 1,
"architecture_name": "x86_64",
"capabilities": ["build"],
"certname": "host2",
"comment": "this hostname2",
"created_at": "2017-03-08T15:27:11Z",
"disk": "20gb",
"domain_id": 5,
"boardmanufacturer": "Intel Corporation",
"boardproductname": "440BX Desktop Reference Platform",
"ipaddress": "2.1.1.1",
"ipaddress_eth0": "2.2.2.2",
"ipaddress_lo": "127.0.0.1",

  1. First, you should set the document_type in filebeat.yml, like this:

     filebeat:
       prospectors:
         - input_type: log
           paths:
             - "/home/ubuntu/data/test.json"
           document_type: json
           json.message_key: log
           json.keys_under_root: true
           json.overwrite_keys: true

This may also help: https://www.elastic.co/blog/structured-logging-filebeat

  2. Then you can parse the JSON value in Logstash and set it into a new field (configure this in logstash.conf):

     json {
       source => "parameter"
       target => "parameterData"
       remove_field => "parameter"
     }

Documentation: https://www.elastic.co/guide/zh-CN/logstash/current/plugins-filters-json.html

  3. You can test in Logstash using stdin and stdout.
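
For example, a throwaway pipeline like the sketch below can be used for this: with the stdin input each pasted line ends up in the message field, so the json filter reads from message here, and the rubydebug codec prints the parsed event.

input {
  stdin {}
}

filter {
  json {
    source => "message"
  }
}

output {
  stdout {
    codec => rubydebug
  }
}

Run it with bin/logstash -f test.conf and paste a single-line JSON object to see how it gets parsed into fields.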

I have used the configuration below as you suggested and I am seeing the JSON error messages shown below. It looks like Filebeat sends each line separately, and if I use the multiline option (as below) then Filebeat and Logstash send the whole JSON file as one message. That is why I want to break the message up per hostname, as described above.

multiline.pattern: '^{'

multiline.negate: true

multiline.match: after

#=========================== Filebeat configuration =============================

filebeat.prospectors:


- type: log

  # Change to true to enable this prospector configuration.
  enabled: true

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /opt/uploaddata/*.json
    #- c:\programdata\elasticsearch\logs\*


  ### JSON configuration

  document_type: json

  json.message_key: log


  json.keys_under_root: true

  json.overwrite_keys: true

  #json.add_error_key: false


output.logstash:
  # The Logstash hosts
  hosts: ["localhost:5044"]


#=========================== Logstash  =============================
input {
  beats {
    port => "5044"
  }
}

filter {
  json {
    source => "parameter"
    target => "parameterData"
    remove_field => "parameter"
  }
}

output {
  elasticsearch {
    hosts => [ "10.138.7.51:9200" ]
    index => "inventory-%{+YYYY-MM-dd}"
  }
  stdout {
    codec => rubydebug
  }
}

#=========================== Filebeat errors =============================

2017/11/24 16:45:14.226665 json.go:32: ERR Error decoding JSON: json: cannot unmarshal string into Go value of type map[string]interface {}
2017/11/24 16:45:14.226757 processor.go:262: DBG Publish event: {
  "@timestamp": "2017-11-24T16:45:14.226Z",
  "@metadata": {
    "beat": "filebeat",
    "type": "doc",
    "version": "6.0.0"
  },
  "json": {},
  "message": "            \"host4\",",
  "prospector": {
    "type": "log"
  },
  "beat": {
    "name": "filebeat",
    "hostname": "filebeat",
    "version": "6.0.0"
  },
  "source": "/opt/uploaddata/data.json",
  "offset": 1710
}
2017/11/24 16:45:14.226800 json.go:32: ERR Error decoding JSON: EOF
2017/11/24 16:45:14.226889 processor.go:262: DBG Publish event: {
  "@timestamp": "2017-11-24T16:45:14.226Z",
  "@metadata": {
    "beat": "filebeat",
    "type": "doc",
    "version": "6.0.0"
  },
  "json": {},
  "message": "",
  "source": "/opt/uploaddata/data.json",
  "offset": 1712,
  "prospector": {
    "type": "log"
  },
  "beat": {
    "name": "filebeat",
    "hostname": "filebeat",
    "version": "6.0.0"
  }


#=========================== Logstash Logs  =============================

{
    "@timestamp" => 2017-11-24T16:45:14.226Z,
        "offset" => 1638,
      "@version" => "1",
          "beat" => {
            "name" => "filebeat",
        "hostname" => "filebeat",
         "version" => "6.0.0"
    },
          "host" => "filebeat",
    "prospector" => {
        "type" => "log"
    },
          "json" => {},
        "source" => "/opt/uploaddata/data.json",
       "message" => "         },",
          "tags" => [
        [0] "beats_input_codec_plain_applied"
    ]
}
{
    "@timestamp" => 2017-11-24T16:45:14.226Z,
        "offset" => 1666,
      "@version" => "1",
          "beat" => {
            "name" => "filebeat",
        "hostname" => "filebeat",
         "version" => "6.0.0"
    },
          "host" => "filebeat",
          "json" => {},
    "prospector" => {
        "type" => "log"
    },
        "source" => "/opt/uploaddata/data.json",
       "message" => "         \"foreman_all\":[  ",
          "tags" => [
        [0] "beats_input_codec_plain_applied"
    ]
}
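
One more note on the multiline route: according to the Filebeat documentation, the json.* options decode one JSON object per line, which would explain the per-line decode errors above, and multiline merging is capped by multiline.max_lines (default 500). A full inventory with around 100k records would therefore presumably need that limit raised, roughly like this (untested, and the value is only a guess):

multiline.pattern: '^{'
multiline.negate: true
multiline.match: after
# guessed value, large enough to cover the whole pretty-printed inventory file
multiline.max_lines: 500000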
