ELK

  1. create internal docker network
    docker network create -d bridge mynetwork
    
  1. start elasticsearch
    docker run -p 9200:9200 -p 9300:9300 --net=mynetwork --name elasticsearch -e "discovery.type=single-node" docker.elastic.co/elasticsearch/elasticsearch:7.6.2 
    
  1. start logstash
    docker run -d --name=logstash --network=mynetwork --env LOGSTASH_CONF_FILENAME=logstash.conf -v /home/krit/Documents/Dockers/ELK/Bitnami/bitnami-docker-logstash/pipeline:/bitnami/logstash/config bitnami.net/logstash:latest
    
    # to access the container as the root user
    docker exec -it --user root logstash bash
    
  1. start kibana
    docker run --rm --name kibana --net=mynetwork -p 5601:5601 kibana:7.6.2  
    
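Once all four containers are up, a quick reachability check (assuming the port mappings above):

# Elasticsearch should answer with its cluster info
curl http://localhost:9200

# Kibana status endpoint (it may take a minute before it responds)
curl http://localhost:5601/api/status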

Index and data operations (show, add, search, delete)

Show the indices (databases) that exist in Elasticsearch

curl -X GET localhost:9200/_cat/indices

Before starting Logstash, edit logstash.conf under pipeline/logstash.conf (this directory is mounted into the container above):

[krit@mini bitnami-docker-logstash]$ cat pipeline/logstash.conf 
input {
  http {
    host => "0.0.0.0" # default: 0.0.0.0
    port => 31311 # default: 8080
  }
}

filter {
  csv {
    separator => ","
    columns => ["ID", "Moisture", "Temperature", "Light"]
    convert => {
      "Moisture" => "float"
      "Temperature" => "float"
      "Light" => "float"
    }
  }

  mutate {
    remove_field => ["host", "headers"]
  }
}

output {
  elasticsearch {
    hosts => ["elasticsearch:9200"]
    index => "logstash-2021.02.02-000001"
  }
}
[krit@mini bitnami-docker-logstash]$ 
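
Logstash reads this pipeline file at startup; if it is changed while the container is already running, the simplest way to pick up the change (unless automatic config reload is enabled) is to restart the container:

docker restart logstash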

Add data by sending a CSV record to the Logstash HTTP input (port 31311 on the container's network address; the URL path is not significant to the input)

curl -XPUT 'http://172.18.0.3:31311/twitter/tweet/1' -d 'abc,4.0,5.0,1.0' 
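
The 172.18.0.3 address above should be the Logstash container's IP on mynetwork (it can differ between runs); it can be looked up with docker inspect:

docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' logstash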

Search and show all data

curl -X GET http://localhost:9200/logstash-2021.02.02-000001/_doc/_search?pretty=true

Query by date

 curl -X POST -H "Content-Type: application/json" http://127.0.0.1:9200/mjson-2021-02-09/_doc/_search?pretty=true -d '{"query":{"range":{"@timestamp":{"from":"2021-02-08T00:00:00","to":"2021-02-09T23:59:59" }}}}'

Delete data within a date range. Note that @timestamp is stored in UTC (GMT) while Kibana displays local time (UTC+7 here), so subtract 7 hours: 10:00 in Kibana corresponds to 03:00 in the query.

curl -X POST -H "Content-Type: application/json" http://127.0.0.1:9200/mjson-2021-02-09/_doc/_delete_by_query -d '{"query":{"range":{"@timestamp":{"from":"2021-02-08T00:00:00","to":"2021-02-09T23:59:59" }}}}'
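
As an alternative to shifting the hours by hand, the range query also accepts a time_zone parameter, so the timestamps can be written in local time (a sketch against the same example index, assuming a UTC+7 local zone; the same query body works for _search and _delete_by_query):

curl -X POST -H "Content-Type: application/json" http://127.0.0.1:9200/mjson-2021-02-09/_doc/_search?pretty=true -d '{"query":{"range":{"@timestamp":{"gte":"2021-02-08T00:00:00","lte":"2021-02-09T23:59:59","time_zone":"+07:00"}}}}'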

Manage index aliases directly in Elasticsearch

curl -H "Content-Type: application/json" -XPOST "http://localhost:9200/test1/_doc" -d "{ \"id\" : 1}"
curl -H "Content-Type: application/json" -XPOST "http://localhost:9200/test2/_doc" -d "{ \"id\" : 2}"

curl -X GET http://127.0.0.1:9200/test1/_doc/_search?pretty

# add indices to an alias
# 1st method: add both indices in a single action
curl -X POST "localhost:9200/_aliases?pretty" -H 'Content-Type: application/json' -d'
{
    "actions" : [
        { "add" : { "indices" : ["test1", "test2"], "alias" : "test" } }
    ]
}'
 
# 2nd method: one add action per index
curl -X POST "localhost:9200/_aliases?pretty" -H 'Content-Type: application/json' -d'
{
    "actions" : [
        { "add" : { "index" : "test1", "alias" : "alias1" } },
        { "add" : { "index" : "test2", "alias" : "alias1" } }
    ]
}'
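
# after either method the alias can be searched like a normal index, e.g. the
# "test" alias from the 1st method returns documents from both test1 and test2
curl -X GET "http://localhost:9200/test/_search?pretty"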

# remove test2 from alias test
curl -X POST "localhost:9200/_aliases?pretty" -H 'Content-Type: application/json' -d'
{
    "actions" : [
        { "remove" : { "index" : "test2", "alias" : "test" } }
    ]
}'

# remove test1 from alias test and add test2 to alias test
curl -X POST "localhost:9200/_aliases?pretty" -H 'Content-Type: application/json' -d'
{
    "actions" : [
        { "remove" : { "index" : "test1", "alias" : "test" } },
        { "add" : { "index" : "test2", "alias" : "test" } }
    ]
}'


curl -X GET localhost:9200/_cat/indices

# When an index is deleted, any aliases pointing to it are removed with it,
# so test1 and test2 can simply be deleted directly.
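
# list existing aliases and the indices they point to
curl -X GET "localhost:9200/_cat/aliases?v"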

Snapshot

curl -X PUT "localhost:9200/_snapshot/backup-repo?pretty" -H 'Content-Type: application/json' -d'
{
  "type": "fs",
  "settings": {
    "location": "/usr/share/elasticsearch/data/repo",
    "compress": true
  }
}'
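
For an fs repository, Elasticsearch only accepts locations that are listed in its path.repo setting; without it the PUT above is rejected. With the official image this can be passed as an environment variable, the same way discovery.type is passed in the run command above (a sketch; worth verifying against your setup):

docker run -p 9200:9200 -p 9300:9300 --net=mynetwork --name elasticsearch -e "discovery.type=single-node" -e "path.repo=/usr/share/elasticsearch/data/repo" docker.elastic.co/elasticsearch/elasticsearch:7.6.2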

# check the snapshot repository config
curl -X GET "localhost:9200/_snapshot?pretty"

# take a snapshot
curl -X PUT "localhost:9200/_snapshot/backup-repo/snapshot-1?pretty"

# check snapshot info
curl -X GET "localhost:9200/_snapshot/backup-repo/snapshot-1?pretty"

# check snapshot status 
curl -X GET "localhost:9200/_snapshot/backup-repo/snapshot-1/_status?pretty"

# close all indices
curl -X POST "localhost:9200/_all/_close?pretty"

# restore the snapshot (make sure all indices are CLOSED before restoring)
curl -X POST "localhost:9200/_snapshot/backup-repo/snapshot-1/_restore?pretty"

# reopen all indices (usually not needed; the restore reopens the restored indices)
curl -X POST "localhost:9200/_all/_open?pretty"


Schedule a snapshot every day at 03:03 AM (snapshot lifecycle management policy)

curl -X PUT "localhost:9200/_slm/policy/backup_policy_daily" -H 'Content-Type: application/json' -d'
{
  "schedule": "0 03 3 * * ?",
  "name": "<backup-{now/d}>",
  "repository": "backup-repo",
  "config": {
     "indices": ["*"]
   },
  "retention": {
      "expire_after": "2d"
  }
}'

# check policy
curl -X GET "localhost:9200/_slm/policy/backup_policy_daily?pretty"

# execute the policy manually
curl -X POST "localhost:9200/_slm/policy/backup_policy_daily/_execute"

# list all snapshots
 curl -X GET http://127.0.0.1:9200/_cat/snapshots/backup-repo?pretty
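
# retention (the expire_after setting above) runs on its own schedule, but it can also be triggered manually
curl -X POST "localhost:9200/_slm/_execute_retention?pretty"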
