= ELK = 1. create internal docker network {{{ docker network create -d bridge mynetwork }}} 2. start elasticsearch {{{ docker run -p 9200:9200 -p 9300:9300 --net=mynetwork --name elasticsearch -e "discovery.type=single-node" docker.elastic.co/elasticsearch/elasticsearch:7.6.2 }}} 3. start logstash {{{ docker run -d --name=logstash --network=mynetwork --env LOGSTASH_CONF_FILENAME=logstash.conf -v /home/krit/Documents/Dockers/ELK/Bitnami/bitnami-docker-logstash/pipeline:/bitnami/logstash/config bitnami.net/logstash:latest # to access the container as the root user docker exec -it --user root logstash bash }}} 4. start kibana {{{ docker run --rm --name kibana --net=mynetwork -p 5601:5601 kibana:7.6.2 }}} == Query index/data/add/delete == show the indices (databases) that are used in ELK {{{ curl -X GET localhost:9200/_cat/indices }}} When starting logstash, we need to edit logstash.conf under pipeline/logstash.conf {{{ #!sh [krit@mini bitnami-docker-logstash]$ cat pipeline/logstash.conf input { http { host => "0.0.0.0" # default: 0.0.0.0 port => 31311 # default: 8080 } } filter { csv { separator => "," columns => ["ID", "Moisture", "Temperature", "Light"] convert => { "Moisture" => "float" "Temperature" => "float" "Light" => "float" } } mutate { remove_field => ["host", "headers"] } } output { elasticsearch { hosts => ["elasticsearch:9200"] index => "logstash-2021.02.02-000001" } } [krit@mini bitnami-docker-logstash]$ }}} add data {{{ curl -XPUT 'http://172.18.0.3:31311/twitter/tweet/1' -d 'abc,4.0,5.0,1.0' }}} search and show all data {{{ curl -X GET http://localhost:9200/logstash-2021.02.02-000001/_doc/_search?pretty=true }}}