Logstash

Use the following logstash.conf:

[krit@mini bitnami-docker-logstash]$ more logstash_http_multi_1.conf 
input {
  http {
    host => "0.0.0.0" # default: 0.0.0.0
    port => 31311 # default: 8080
  }
}

filter {
  csv {
    separator => ","
    columns => ["ID", "Moisture", "Temperature", "latitude", "longitude"]
    convert => {
      "Moisture" => "float"
      "Temperature" => "float"
    }
  }
  mutate { convert => {"latitude" => "float"} }
  mutate { convert => {"longitude" => "float"} }
  mutate { add_field => { "geoLocation" => "%{latitude},%{longitude}" } }
  
  mutate { remove_field => ["host", "headers"] }
}

output {
  elasticsearch {
    hosts => ["elasticsearch:9200"]
    index => "poi-2021-02-04"
  }
}
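
For reference, a CSV payload such as arl,5.0,3.0,13.69,100.7501 ends up in Elasticsearch roughly as the document below (a sketch; Logstash metadata fields such as @timestamp and @version are omitted):

{
  "message": "arl,5.0,3.0,13.69,100.7501",
  "ID": "arl",
  "Moisture": 5.0,
  "Temperature": 3.0,
  "latitude": 13.69,
  "longitude": 100.7501,
  "geoLocation": "13.69,100.7501"
}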

In Kibana (Dev Tools), to show the data on a map as a geo_point, we need to apply the following template for the index poi-2021-02-04:

PUT _template/geotemplate
{
  "index_patterns": ["poi-2021-02-04"],
  "settings": {},
  "mappings": { "properties": {"geoLocation": {"type": "geo_point"} } },
  "aliases": {}
}
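
The template only takes effect for indices created after it exists, so apply it before sending the first document. Once data has arrived, the template and mapping can be checked from Kibana Dev Tools:

GET _template/geotemplate
GET poi-2021-02-04/_mapping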

Then test with the following data:

# AirPort
curl -XPUT 'http://172.18.0.3:31311/twitter/tweet/1' -d 'arl,5.0,3.0,13.69,100.7501'

# KMITL
curl -XPUT 'http://172.18.0.3:31311/twitter/tweet/1' -d 'kml,8.0,1.0,13.7299,100.7782'

# Cha-choeng-sao
curl -XPUT 'http://172.18.0.3:31311/twitter/tweet/1' -d 'ccs,8.0,1.0,13.6904,101.0780'

# Bang-Ka-Nak
curl -XPUT 'http://172.18.0.3:31311/twitter/tweet/1' -d 'bkn,1.0,1.0,13.8528,101.1165'

# Pra-Nom-Sara-Karm
curl -XPUT 'http://172.18.0.3:31311/twitter/tweet/1' -d 'psk,1.0,1.0,13.744,101.3470'

# Sa-mut-Pra-Karn
curl -XPUT 'http://172.18.0.3:31311/twitter/tweet/1' -d 'spk,1.0,1.0,13.5991,100.5998'

# Ra-Sa-ParkLen
curl -XPUT 'http://172.18.0.3:31311/twitter/tweet/1' -d 'rpk,1.0,1.0,13.895399,100.641752'
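
After sending the data, the indexed documents can be inspected from Kibana Dev Tools (or with curl against your Elasticsearch host):

GET poi-2021-02-04/_count
GET poi-2021-02-04/_search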

If we want to use JSON, we need a logstash.conf like the following:

[krit@mini bitnami-docker-logstash]$ cat logstash_http_josn_1.conf 
input {
  http {
    host => "0.0.0.0" # default: 0.0.0.0
    port => 31311 # default: 8080
  }
}

filter {
  json {
        source => "message"
  }
  if [ID] == "cc1" {
        drop {}
  }
}

output {
  elasticsearch {
    hosts => ["elasticsearch:9200"]
    index => "mjson-2021-02-09"
  }
}
[krit@mini bitnami-docker-logstash]$ 
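
Note that this pipeline does not build a geoLocation field by itself, so the geo_point template below only becomes useful once such a field is added. A sketch of the extra mutate filters, using the same approach as the multi-input config further down and assuming the JSON payload carries lat and lon:

filter {
  json {
        source => "message"
  }
  if [ID] == "cc1" {
        drop {}
  }
  # build the "lat,lon" string expected by the geo_point mapping (field names assumed)
  mutate { add_field => { "geoLocation" => "%{[lat]},%{[lon]}" } }
  mutate { remove_field => ["host", "headers"] }
}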

We need to add a template (again via Kibana Dev Tools) so Elasticsearch knows the mapping for this index:

PUT _template/jsongeotemplate
{
  "index_patterns": ["mjson-2021-02-09"],
  "settings": {},
  "mappings": { "properties": {"geoLocation": {"type": "geo_point"} } },
  "aliases": {}
}

Then we can test with the following:

# AirPort
curl -X POST -H "Content-Type: application/json" 'http://172.18.0.3:31311/twitter/tweet/1' -d '{"ID": "arl", "speed": 1.2, "Lumi": 1, "lat": 13.69, "lon": 100.7501}'

# Ra-Sa-ParkLen
curl -X POST -H "Content-Type: application/json" 'http://172.18.0.3:31311/twitter/tweet/1' -d '{"ID": "rpk", "speed": 1.9, "Lumi": 2, "lat": 13.89534, "lon": 100.641752 }'

# Sa-mut-Pra-Karn
curl -X POST -H "Content-Type: application/json" 'http://172.18.0.3:31311/twitter/tweet/1' -d '{"ID": "spk", "speed": 2.1, "Lumi": 3, "lat": 13.5991, "lon": 100.5998 }'

# Cha-choeng-sao
curl -X POST -H "Content-Type: application/json" 'http://172.18.0.3:31311/twitter/tweet/1' -d '{"ID": "ccs", "speed": 2.4, "Lumi": 1, "lat": 13.6904, "lon": 101.0780}'

# Bang-ka-nak
curl -X POST -H "Content-Type: application/json" 'http://172.18.0.3:31311/twitter/tweet/1' -d '{"ID": "bkn", "speed": 1.4, "Lumi": 1, "lat": 13.8528, "lon": 101.1165}'

# Pra-Nom-Sara-Karm
curl -X POST -H "Content-Type: application/json" 'http://172.18.0.3:31311/twitter/tweet/1' -d '{"ID": "psk", "speed": 1.4, "Lumi": 1, "lat": 13.744, "lon": 101.3470}'

# KMITL
curl -X POST -H "Content-Type: application/json" 'http://172.18.0.3:31311/twitter/tweet/1' -d '{"ID": "kml", "speed": 1.4, "Lumi": 1, "lat": 13.7299, "lon": 100.7782}'

Please note: if the ID is "cc1", Logstash will drop the event, because of the drop filter in logstash.conf.
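
For example, the following event never reaches Elasticsearch (the field values are illustrative), which can be verified with a search that returns no hits:

curl -X POST -H "Content-Type: application/json" 'http://172.18.0.3:31311/twitter/tweet/1' -d '{"ID": "cc1", "speed": 1.0, "Lumi": 1, "lat": 13.7, "lon": 100.6}'

GET mjson-2021-02-09/_search
{
  "query": { "match": { "ID": "cc1" } }
}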

To have more than one index, each fed from a different port, we need a logstash.conf like the following:

[krit@mini bitnami-docker-logstash]$ more logstash_http_josn_3.conf
input {
  http {
    type => "farm"
    host => "0.0.0.0" # default: 0.0.0.0
    port => 31311 # default: 8080
  }
}

input {
  http {
    type => "ev"
    host => "0.0.0.0" # default: 0.0.0.0
    port => 8080 # default: 8080
  }
}

filter {

  if [type] == "farm" {
      json {
            source => "message"
      }
      mutate { add_field => { "geoLocation" => "%{[lat]},%{[lon]}" } }
      mutate { remove_field => ["host", "headers"] }
  }
  if [type] == "ev" {
      json {
            source => "message"
      }
      mutate { add_field => { "geoLocation" => "%{[lat]},%{[lon]}" } }
      mutate { remove_field => ["host", "headers"] }
  }
}

output {

   if [type] == "farm" {
      elasticsearch {
        hosts => ["elasticsearch:9200"]
        index => "mjson-2021-02-09"
      }
   }
   if [type] == "ev" {
      elasticsearch {
        hosts => ["elasticsearch:9200"]
        index => "ev-2021-02-09"
      }
   }

}
[krit@mini bitnami-docker-logstash]$ 
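
Because this pipeline listens on two ports, both must be reachable from the machine sending the test requests; when Logstash runs in Docker, publish both ports. A sketch only: the image tag, network wiring, and the container path the pipeline file is mounted to depend on your bitnami-docker-logstash setup and are assumptions here.

docker run --rm -p 31311:31311 -p 8080:8080 \
  -v "$(pwd)/logstash_http_josn_3.conf:/opt/bitnami/logstash/pipeline/logstash.conf" \
  bitnami/logstash:latest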

Then configure the template so the geo_point in index ev-2021-02-09 is mapped, as follows:

PUT _template/evgeotemplate
{
  "index_patterns": ["ev-2021-02-09"],
  "settings": {},
  "mappings": { "properties": {"geoLocation": {"type": "geo_point"} } },
  "aliases": {}
}

Then test with port 8080 for the ev index, or port 31311 for the mjson index:

# AirPort
curl -X POST -H "Content-Type: application/json" 'http://172.18.0.3:31311/twitter/tweet/1' -d '{"ID": "arl", "speed": 1.2, "Lumi": 1, "lat": 13.69, "lon": 100.7501}'

# AirPort
curl -X POST -H "Content-Type: application/json" 'http://172.18.0.3:8080/twitter/tweet/1' -d '{"ID": "arl", "speed": 1.2, "Lumi": 1, "lat": 13.69, "lon": 100.7501}'