2
votes

I'm trying to create an Elastic search installation using docker containers. I'm using only elastic.io provider's images.

I'm encountering an error when starting my logstash instance.

Here are my configurations:

docker-compose.yml

# Docker Compose v2 file: a two-node Elasticsearch cluster plus one Logstash
# instance, all using official docker.elastic.co images (5.2.2).
version: '2'
services:
  elasticsearch1:
    image: docker.elastic.co/elasticsearch/elasticsearch:5.2.2
    container_name: elasticsearch1
    environment:
      - cluster.name=docker-cluster
      # Lock the JVM heap in RAM; requires the unlimited memlock ulimit below
      # and the IPC_LOCK capability.
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
      nofile:
        soft: 65536
        hard: 65536
    mem_limit: 1g
    cap_add:
      - IPC_LOCK
    volumes:
      - esdata1:/usr/share/elasticsearch/data
    networks:
      - esnet
  elasticsearch2:
    image: docker.elastic.co/elasticsearch/elasticsearch:5.2.2
    container_name: elasticsearch2
    environment:
      - cluster.name=docker-cluster
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
      # Second node joins the cluster by unicast-discovering elasticsearch1.
      - "discovery.zen.ping.unicast.hosts=elasticsearch1"
    ulimits:
      memlock:
        soft: -1
        hard: -1
      nofile:
        soft: 65536
        hard: 65536
    mem_limit: 1g
    cap_add:
      - IPC_LOCK
    volumes:
      - esdata2:/usr/share/elasticsearch/data
    networks:
      - esnet

  logstash:
    image: docker.elastic.co/logstash/logstash:5.2.2
    container_name: logstash
    mem_limit: 1g
    # NOTE(review): logstash is not attached to esnet, while both ES nodes are
    # attached ONLY to esnet; legacy `links` creates the alias "elasticsearch"
    # for elasticsearch1, but in compose v2 containers must share a network to
    # communicate — confirm the alias is actually resolvable/reachable here.
    links:
      - elasticsearch1:elasticsearch
    volumes:
      # Override the image's default logstash.yml with the local settings file.
      - ./logstash.yml:/usr/share/logstash/config/logstash.yml

# Named local volumes so Elasticsearch data survives container recreation.
volumes:
  esdata1:
    driver: local
  esdata2:
    driver: local

networks:
  esnet:
    driver: bridge

and now my logstash.yml

# Logstash settings file (logstash.yml) — this configures the Logstash
# PROCESS, not its pipeline. In particular, `http.host` below only sets the
# bind address of Logstash's own monitoring/metrics HTTP API; it does NOT
# tell the elasticsearch output where to send events — that lives in the
# pipeline configuration, which here still defaults to localhost:9200
# (visible in the debug log further down).
node:
  name: 'default logstash'
http:
  host: elasticsearch
pipeline:
  # Single worker, tiny batches — debugging-friendly, not a production tuning.
  workers: 1
  batch:
    size: 12
    delay: 5
log:
  level: 'debug'

I don't know why, but Logstash tells me that it cannot connect to the Elasticsearch instance, with this error message:

...
[DEBUG][logstash.runner] *http.host: "elasticsearch" (default: "127.0.0.1")
...
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@hosts = [http://localhost:9200]
[INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://logstash_system:xxxxx@localhost:9200/_xpack/monitoring/?system_id=logstash&system_api_version=2&interval=1s]}}

Could someone tell me why Logstash is using the wrong host even though it really got my settings?

1
Have you tried accessing the Elasticsearch node via an HTTP request on port 9200? — Adonis
It seems that Logstash is not even pointing to the elasticsearch host. So not a communication problem? — Kévin BEAUGRAND

1 Answer

2
votes

I found a solution by replacing my logstash.yml with a logstash.conf pipeline file:


    # Logstash pipeline definition (logstash.conf) — unlike logstash.yml,
    # this is where the Elasticsearch output host is actually configured.
    input {
      # stdin for manual testing; beats listens on the standard Beats port.
      stdin { }
      beats {
        port => 5044
      }
    }

    output {
      elasticsearch {
        # "elasticsearch" is the compose link alias for elasticsearch1;
        # the plugin's default port 9200 is used.
        hosts    => [ 'elasticsearch' ]
        # NOTE(review): default X-Pack credentials hard-coded here — move to
        # environment variables or a secret store before any real deployment.
        user     => 'elastic'
        password => 'changeme'
      }
    }

Finally, I changed my docker-compose file to mount this conf file:


    volumes:
      # Mount the whole pipeline directory so Logstash picks up logstash.conf
      # (replacing the image's default pipeline).
      - ./logstash-pipeline/:/usr/share/logstash/pipeline/