Logstash: parse multiple lat/lon values

I have the following JSON file (note that the objects are concatenated back to back, with no separator between them):

{
    "station": "A001",
    "X": 503744.7,
    "Y": 4726339.0,
    "Z": 458.84,
    "LON": -2.954286956913572,
    "LAT": 42.68952475979137,
    "dates": [
        "2015-01-01",
        "2015-01-02",
        "2015-01-03",
        "2015-01-04",
        "2015-01-05",
        "2015-01-06"
    ],
    "values": [
        "56.9",
        "49.7",
        "48.1",
        "37.1",
        "34.4",
        "35.9"
    ]
}{
    "station": "A002",
    "X": 607870.5,
    "Y": 4670754.0,
    "Z": 264.83,
    "LON": -1.69378623727067,
    "LAT": 42.18149989583031,
    "dates": [
        "2015-01-01",
        "2015-01-02",
        "2015-01-03",
        "2015-01-04"
    ],
    "values": [
        "287",
        "231",
        "207",
        "191"
    ]
}{
    "station": "A403",
    "X": 868708.0,
    "Y": 4709148.0,
    "Z": 849.0,
    "LON": 1.483146867002623,
    "LAT": 42.44694604132231,
    "dates": [
        "2015-01-01",
        "2015-01-02",
        "2015-01-03",
        "2015-01-04",
        "2015-01-05",
        "2015-01-06",
        "2015-01-07",
        "2015-01-08",
        "2015-01-09"
    ],
    "values": [
        "2.296",
        "7.033",
        "2.298",
        "2.275",
        "7.207",
        "5.456",
        "4.794",
        "4.24",
        "4.748"
    ]
}

I would like to extract the lat/lon values for each station so that they can be imported into Kibana as geo_points.
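
As far as I understand, Kibana will only plot these points if the target field is mapped as geo_point before any documents are indexed. This is just a sketch of the mapping I have in mind (the index and field names simply mirror my config below):

PUT test_geo_data_json
{
    "mappings": {
        "properties": {
            "locations": { "type": "geo_point" }
        }
    }
}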

I have created the following conf file for Logstash:

input {
    file {
        path => "/etc/logstash/json_data.json"
        sincedb_path => "/dev/null"
        start_position => "beginning"
        codec => multiline {
            pattern => "^Not_exists"
            negate => "true"
            what => "previous"
            auto_flush_interval => 1
            multiline_tag => ""
        }
    }
}
filter {
    json {
        source => "message"
        remove_field => [ "message", "original" ]
    }
    # earlier attempts, kept for reference:
    # mutate {
    #     add_field => { "location" => "%{[A001][LAT]},%{[A001][LON]}" }
    # }
    # mutate {
    #     convert => { "location" => "float" }
    # }
    mutate {
        rename => {
            "LON" => "[locations][lon]"
            "LAT" => "[locations][lat]"
        }
        # remove_field => ["LAT", "LON"]
    }
}

output {
    elasticsearch {
        hosts => ["https://myIP:9200"]
        user => "elastic"
        password => "mypaswd"
        ssl_certificate_verification => false
        index => "test_geo_data_json"
        document_type => "json"
    }
    stdout { codec => rubydebug }
}

What I'm getting is that Logstash reads only one station (the last one), so only that station's geo_point is created. (The ^Not_exists pattern in the multiline codec is meant to glue the whole file into a single event, so that the json filter can then parse it.)
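
To see what the multiline codec actually emits, I think a stripped-down pipeline like this sketch (the stock stdin input with the same codec, printing every event via rubydebug) should reproduce the behaviour:

input {
    stdin {
        codec => multiline {
            pattern => "^Not_exists"
            negate => "true"
            what => "previous"
            auto_flush_interval => 1
        }
    }
}
output {
    stdout { codec => rubydebug }
}

It can be fed the same file with something like bin/logstash -f debug.conf < /etc/logstash/json_data.json (debug.conf is just a placeholder name here).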

Could you please tell me what I'm doing wrong?


