Docker compose sample does not start

I’m using the sample docker-compose file from the Docker page of the Open Distro documentation, with these images:

amazon/opendistro-for-elasticsearch                                1.8.0                                            b18a6e6ddf64        4 weeks ago         1.12GB
amazon/opendistro-for-elasticsearch-kibana                         1.8.0                                            356cd3c3705e        4 weeks ago         1.27GB

I left docker-compose up running overnight. It produced probably hundreds of MB of log output, but eventually settled into something like this:

odfe-node2    | [2020-07-05T16:32:32,903][INFO ][c.a.o.j.s.JobSweeper     ] [odfe-node2] Running full sweep
odfe-node2    | [2020-07-05T16:32:33,655][INFO ][c.a.o.e.p.c.ScheduledMetricCollectorsExecutor] [odfe-node2] Collector OSMetrics is still in progress, so skipping this Interval
odfe-node2    | [2020-07-05T16:32:32,758][ERROR][c.a.o.s.a.s.InternalESSink] [odfe-node2] Unable to index audit log {"audit_cluster_name":"odfe-cluster","audit_node_name":"odfe-node2","audit_category":"SSL_EXCEPTION","audit_request_origin":"TRANSPORT","audit_node_id":"haooiiH6T3CnGNX8I7hRlw","audit_request_layer":"TRANSPORT","audit_request_exception_stacktrace":"javax.net.ssl.SSLHandshakeException: Insufficient buffer remaining for AEAD cipher fragment (2). Needs to be more than tag size (16)\n\tat java.base/sun.security.ssl.Alert.createSSLException(Alert.java:131)\n\tat java.base/sun.security.ssl.TransportContext.fatal(TransportContext.java:320)\n\tat java.base/sun.security.ssl.TransportContext.fatal(TransportContext.java:263)\n\tat java.base/sun.security.ssl.TransportContext.fatal(TransportContext.java:258)\n\tat java.base/sun.security.ssl.SSLTransport.decode(SSLTransport.java:129)\n\tat java.base/sun.security.ssl.SSLEngineImpl.decode(SSLEngineImpl.java:681)\n\tat java.base/sun.security.ssl.SSLEngineImpl.readRecord(SSLEngineImpl.java:636)\n\tat java.base/sun.security.ssl.SSLEngineImpl.unwrap(SSLEngineImpl.java:454)\n\tat java.base/sun.security.ssl.SSLEngineImpl.unwrap(SSLEngineImpl.java:433)\n\tat java.base/javax.net.ssl.SSLEngine.unwrap(SSLEngine.java:634)\n\tat io.netty.handler.ssl.SslHandler$SslEngineType$3.unwrap(SslHandler.java:281)\n\tat io.netty.handler.ssl.SslHandler.unwrap(SslHandler.java:1324)\n\tat io.netty.handler.ssl.SslHandler.decodeJdkCompatible(SslHandler.java:1219)\n\tat io.netty.handler.ssl.SslHandler.decode(SslHandler.java:1266)\n\tat io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:498)\n\tat io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:437)\n\tat io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:276)\n\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:377)\n\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:363)\n\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:355)\n\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:377)\n\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:363)\n\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysPlain(NioEventLoop.java:615)\n\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:578)\n\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n\tat java.base/java.lang.Thread.run(Thread.java:835)\nCaused by: javax.crypto.BadPaddingException: Insufficient buffer remaining for AEAD cipher fragment (2). 
Needs to be more than tag size (16)\n\tat java.base/sun.security.ssl.SSLCipher$T13GcmReadCipherGenerator$GcmReadCipher.decrypt(SSLCipher.java:1878)\n\tat java.base/sun.security.ssl.SSLEngineInputRecord.decodeInputRecord(SSLEngineInputRecord.java:240)\n\tat java.base/sun.security.ssl.SSLEngineInputRecord.decode(SSLEngineInputRecord.java:197)\n\tat java.base/sun.security.ssl.SSLEngineInputRecord.decode(SSLEngineInputRecord.java:160)\n\tat java.base/sun.security.ssl.SSLTransport.decode(SSLTransport.java:108)\n\t... 27 more\n","@timestamp":"2020-07-05T15:18:16.187+00:00","audit_request_effective_user_is_admin":false,"audit_format_version":4,"audit_node_host_address":"172.19.0.4","audit_node_host_name":"172.19.0.4"} due to ClusterBlockException[blocked by: [SERVICE_UNAVAILABLE/2/no master];]
odfe-node2    | org.elasticsearch.cluster.block.ClusterBlockException: blocked by: [SERVICE_UNAVAILABLE/2/no master];
odfe-node2    | 	at org.elasticsearch.cluster.block.ClusterBlocks.globalBlockedException(ClusterBlocks.java:189) ~[elasticsearch-7.7.0.jar:7.7.0]
odfe-node2    | 	at org.elasticsearch.action.bulk.TransportBulkAction$BulkOperation.handleBlockExceptions(TransportBulkAction.java:537) ~[elasticsearch-7.7.0.jar:7.7.0]
odfe-node2    | 	at org.elasticsearch.action.bulk.TransportBulkAction$BulkOperation.doRun(TransportBulkAction.java:417) ~[elasticsearch-7.7.0.jar:7.7.0]
odfe-node2    | 	at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:37) ~[elasticsearch-7.7.0.jar:7.7.0]
odfe-node2    | 	at org.elasticsearch.action.bulk.TransportBulkAction$BulkOperation$2.onTimeout(TransportBulkAction.java:571) ~[elasticsearch-7.7.0.jar:7.7.0]
odfe-node2    | 	at org.elasticsearch.cluster.ClusterStateObserver$ContextPreservingListener.onTimeout(ClusterStateObserver.java:325) ~[elasticsearch-7.7.0.jar:7.7.0]
odfe-node2    | 	at org.elasticsearch.cluster.ClusterStateObserver$ObserverClusterStateListener.onTimeout(ClusterStateObserver.java:252) ~[elasticsearch-7.7.0.jar:7.7.0]
odfe-node2    | 	at org.elasticsearch.cluster.service.ClusterApplierService$NotifyTimeout.run(ClusterApplierService.java:598) ~[elasticsearch-7.7.0.jar:7.7.0]
odfe-node2    | 	at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingRunnable.run(ThreadContext.java:633) ~[elasticsearch-7.7.0.jar:7.7.0]
odfe-node2    | 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) [?:?]
odfe-node2    | 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) [?:?]
odfe-node2    | 	at java.lang.Thread.run(Thread.java:835) [?:?]
odfe-node1    | [2020-07-05T16:32:33,911][INFO ][c.a.o.e.p.c.ScheduledMetricCollectorsExecutor] [odfe-node1] Collector OSMetrics is still in progress, so skipping this Interval
odfe-node2    | [2020-07-05T16:32:33,959][INFO ][stdout                   ] [odfe-node2] AUDIT_LOG: {
odfe-node2    | [2020-07-05T16:32:33,959][INFO ][stdout                   ] [odfe-node2]   "audit_cluster_name" : "odfe-cluster",
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "audit_node_name" : "odfe-node2",
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "audit_category" : "SSL_EXCEPTION",
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "audit_request_origin" : "TRANSPORT",
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "audit_node_id" : "haooiiH6T3CnGNX8I7hRlw",
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "audit_request_layer" : "TRANSPORT",
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "audit_request_exception_stacktrace" : "javax.net.ssl.SSLHandshakeException: Insufficient buffer remaining for AEAD cipher fragment (2). Needs to be more than tag size (16)\n\tat java.base/sun.security.ssl.Alert.createSSLException(Alert.java:131)\n\tat java.base/sun.security.ssl.TransportContext.fatal(TransportContext.java:320)\n\tat java.base/sun.security.ssl.TransportContext.fatal(TransportContext.java:263)\n\tat java.base/sun.security.ssl.TransportContext.fatal(TransportContext.java:258)\n\tat java.base/sun.security.ssl.SSLTransport.decode(SSLTransport.java:129)\n\tat java.base/sun.security.ssl.SSLEngineImpl.decode(SSLEngineImpl.java:681)\n\tat java.base/sun.security.ssl.SSLEngineImpl.readRecord(SSLEngineImpl.java:636)\n\tat java.base/sun.security.ssl.SSLEngineImpl.unwrap(SSLEngineImpl.java:454)\n\tat java.base/sun.security.ssl.SSLEngineImpl.unwrap(SSLEngineImpl.java:433)\n\tat java.base/javax.net.ssl.SSLEngine.unwrap(SSLEngine.java:634)\n\tat io.netty.handler.ssl.SslHandler$SslEngineType$3.unwrap(SslHandler.java:281)\n\tat io.netty.handler.ssl.SslHandler.unwrap(SslHandler.java:1324)\n\tat io.netty.handler.ssl.SslHandler.decodeJdkCompatible(SslHandler.java:1219)\n\tat io.netty.handler.ssl.SslHandler.decode(SslHandler.java:1266)\n\tat io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:498)\n\tat io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:437)\n\tat io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:276)\n\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:377)\n\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:363)\n\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:355)\n\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:377)\n\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:363)\n\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysPlain(NioEventLoop.java:615)\n\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:578)\n\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n\tat java.base/java.lang.Thread.run(Thread.java:835)\nCaused by: javax.crypto.BadPaddingException: Insufficient buffer remaining for AEAD cipher fragment (2). 
Needs to be more than tag size (16)\n\tat java.base/sun.security.ssl.SSLCipher$T13GcmReadCipherGenerator$GcmReadCipher.decrypt(SSLCipher.java:1878)\n\tat java.base/sun.security.ssl.SSLEngineInputRecord.decodeInputRecord(SSLEngineInputRecord.java:240)\n\tat java.base/sun.security.ssl.SSLEngineInputRecord.decode(SSLEngineInputRecord.java:197)\n\tat java.base/sun.security.ssl.SSLEngineInputRecord.decode(SSLEngineInputRecord.java:160)\n\tat java.base/sun.security.ssl.SSLTransport.decode(SSLTransport.java:108)\n\t... 27 more\n",
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "@timestamp" : "2020-07-05T15:18:16.187+00:00",
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "audit_request_effective_user_is_admin" : false,
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "audit_format_version" : 4,
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "audit_node_host_address" : "172.19.0.4",
odfe-node2    | [2020-07-05T16:32:33,960][INFO ][stdout                   ] [odfe-node2]   "audit_node_host_name" : "172.19.0.4"

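From the repeated ClusterBlockException ("no master") it looks like the two nodes never manage to elect a master. A quick way I know to check that (assuming the security plugin’s default admin:admin demo credentials, which these sample images ship with) would be something like:

curl -XGET "https://localhost:9200/_cat/nodes?v" -u admin:admin --insecure
curl -XGET "https://localhost:9200/_cluster/health?pretty" -u admin:admin --insecure

On a healthy cluster, _cat/nodes should list both odfe-node1 and odfe-node2.
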
Is the sample docker-compose.yml supposed to work? How do I get ES & Kibana working with these images?

Thanks

Here’s the docker-compose.yml file for reference:

version: '3'
services:
  odfe-node1:
    image: amazon/opendistro-for-elasticsearch:1.8.0
    container_name: odfe-node1
    environment:
      - cluster.name=odfe-cluster
      - node.name=odfe-node1
      - discovery.seed_hosts=odfe-node1,odfe-node2
      - cluster.initial_master_nodes=odfe-node1,odfe-node2
      - bootstrap.memory_lock=true # along with the memlock settings below, disables swapping
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m" # minimum and maximum Java heap size, recommend setting both to 50% of system RAM
    ulimits:
      memlock:
        soft: -1
        hard: -1
      nofile:
        soft: 65536 # maximum number of open files for the Elasticsearch user, set to at least 65536 on modern systems
        hard: 65536
    volumes:
      - odfe-data1:/usr/share/elasticsearch/data
    ports:
      - 9200:9200
      - 9600:9600 # required for Performance Analyzer
    networks:
      - odfe-net
  odfe-node2:
    image: amazon/opendistro-for-elasticsearch:1.8.0
    container_name: odfe-node2
    environment:
      - cluster.name=odfe-cluster
      - node.name=odfe-node2
      - discovery.seed_hosts=odfe-node1,odfe-node2
      - cluster.initial_master_nodes=odfe-node1,odfe-node2
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
      nofile:
        soft: 65536
        hard: 65536
    volumes:
      - odfe-data2:/usr/share/elasticsearch/data
    networks:
      - odfe-net
  kibana:
    image: amazon/opendistro-for-elasticsearch-kibana:1.8.0
    container_name: odfe-kibana
    ports:
      - 5601:5601
    expose:
      - "5601"
    environment:
      ELASTICSEARCH_URL: https://odfe-node1:9200
      ELASTICSEARCH_HOSTS: https://odfe-node1:9200
    networks:
      - odfe-net

volumes:
  odfe-data1:
  odfe-data2:

networks:
  odfe-net:
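
For what it’s worth, this is roughly how I’ve been bringing the stack up and checking it, based on my reading of the documentation (again assuming the default admin:admin demo credentials):

docker-compose up
# in a second terminal, once the nodes have started:
curl -XGET https://localhost:9200 -u admin:admin --insecure
# Kibana should then be reachable at http://localhost:5601 with the same admin/admin login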

@F2020 I tested with the file. It works in my environment (Ubuntu). Not sure what environment you are using.

I’m on OSX, Docker version 19.03.8, build afacb8b.
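
In case resource limits are what differs between our environments (just a guess on my part), this is how I’m checking what the Docker Desktop VM has actually been given:

docker info --format 'CPUs: {{.NCPU}}  Memory: {{.MemTotal}} bytes'
docker stats --no-stream

Two Elasticsearch nodes with 512m heaps plus Kibana presumably need more than the 2 GB Docker Desktop allocates by default, so I’ll also try raising that under Preferences > Resources.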