I get the error mentioned in the title every time I start my Elasticsearch docker container via docker-compose. The funny thing is that, despite complaining about the lock, Elasticsearch starts up anyway. Looking around, I found people saying this happens because some process isn't killed when docker-compose runs and gets left behind as an orphan. Well, after docker-compose down I checked and didn't see a single container up, and I start with docker-compose up -d --remove-orphans, which, as far as I understand, is supposed to leave no orphaned container up and running. (I did find a git issue about the --remove-orphans flag from about 10 months ago, but it has since been fixed; that problem is known and resolved.)
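For clarity, the restart sequence described above is roughly the following (docker ps -a is just how I verify that no container is left over before bringing the stack back up):

C:\Dockers\megalog-try-1>docker-compose down
C:\Dockers\megalog-try-1>docker ps -a
C:\Dockers\megalog-try-1>docker-compose up -d --remove-orphans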
Here is my full docker-compose.yml:
version: '3.2'
services:
  kibana:
    image: docker.elastic.co/kibana/kibana:7.5.2
    volumes:
      - "./kibana.yml:/usr/share/kibana/config/kibana.yml"
    restart: always
    environment:
      - SERVER_NAME=kibana.localhost
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
    ports:
      - "5601:5601"
    links:
      - elasticsearch
    depends_on:
      - elasticsearch
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.5.2
    environment:
      - cluster.name=docker-cluster
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
      - xpack.security.enabled=false
      - xpack.watcher.enabled=false
      - discovery.type=single-node
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - "./esdata:/usr/share/elasticsearch/data"
    ports:
      - "9200:9200"
  logstash:
    image: docker.elastic.co/logstash/logstash:7.5.2
    volumes:
      - "./logstash.conf:/config-dir/logstash.conf"
    restart: always
    command: logstash -f /config-dir/logstash.conf
    ports:
      - "9600:9600"
      - "7777:7777"
    links:
      - elasticsearch
      - kafka1
      - kafka2
      - kafka3
  kafka1:
    image: confluentinc/cp-kafka:latest
    depends_on:
      - zoo1
      - zoo2
      - zoo3
    links:
      - zoo1
      - zoo2
      - zoo3
    ports:
      - "9092:9092"
    environment:
      KAFKA_LISTENERS: PLAINTEXT://:9092
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:9092
      KAFKA_BROKER_ID: 1
      KAFKA_ADVERTISED_PORT: 9092
      KAFKA_LOG_RETENTION_HOURS: "168"
      KAFKA_LOG_RETENTION_BYTES: "100000000"
      KAFKA_ZOOKEEPER_CONNECT: zoo1:2181,zoo2:2181,zoo3:2181
      KAFKA_CREATE_TOPICS: "log:3:3"
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
  kafka2:
    image: confluentinc/cp-kafka:latest
    depends_on:
      - zoo1
      - zoo2
      - zoo3
    links:
      - zoo1
      - zoo2
      - zoo3
    ports:
      - "9093:9092"
    environment:
      KAFKA_LISTENERS: PLAINTEXT://:9092
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka2:9092
      KAFKA_BROKER_ID: 2
      KAFKA_ADVERTISED_PORT: 9092
      KAFKA_LOG_RETENTION_HOURS: "168"
      KAFKA_LOG_RETENTION_BYTES: "100000000"
      KAFKA_ZOOKEEPER_CONNECT: zoo1:2181,zoo2:2181,zoo3:2181
      KAFKA_CREATE_TOPICS: "log:3:3"
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
  kafka3:
    image: confluentinc/cp-kafka:latest
    depends_on:
      - zoo1
      - zoo2
      - zoo3
    links:
      - zoo1
      - zoo2
      - zoo3
    ports:
      - "9094:9092"
    environment:
      KAFKA_LISTENERS: PLAINTEXT://:9092
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka3:9092
      KAFKA_BROKER_ID: 3
      KAFKA_ADVERTISED_PORT: 9092
      KAFKA_LOG_RETENTION_HOURS: "168"
      KAFKA_LOG_RETENTION_BYTES: "100000000"
      KAFKA_ZOOKEEPER_CONNECT: zoo1:2181,zoo2:2181,zoo3:2181
      KAFKA_CREATE_TOPICS: "log:3:3"
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
  zoo1:
    image: confluentinc/cp-zookeeper:latest
    environment:
      MYID: 1
      SERVERS: zoo1,zoo2,zoo3
      ZOOKEEPER_CLIENT_PORT: 2181
    ports:
      - "2181:2181"
  zoo2:
    image: confluentinc/cp-zookeeper:latest
    environment:
      MYID: 2
      SERVERS: zoo1,zoo2,zoo3
      ZOOKEEPER_CLIENT_PORT: 2182
    ports:
      - "2182:2181"
  zoo3:
    image: confluentinc/cp-zookeeper:latest
    environment:
      MYID: 3
      SERVERS: zoo1,zoo2,zoo3
      ZOOKEEPER_CLIENT_PORT: 2183
    ports:
      - "2183:2181"
  filebeat:
    image: docker.elastic.co/beats/filebeat:7.5.2
    volumes:
      - "./filebeat.yml:/usr/share/filebeat/filebeat.yml:ro"
      - "./sample-logs:/sample-logs"
    links:
      - kafka1
      - kafka2
      - kafka3
    depends_on:
      - kafka1
      - kafka2
      - kafka3
Here is the most relevant part of the log:
"o.e.b.ElasticsearchUncaughtExceptionHandler", "cluster.name": "docker-cluster", "node.name": "94d8bc6a1491", "message": "uncaught exception in thread [main]",
"stacktrace": ["org.elasticsearch.bootstrap.StartupException: org.apache.lucene.store.AlreadyClosedException: Underlying file changed by an external force at 1970-01-01T00:00:00Z,
(lock=NativeFSLock(path=/usr/share/elasticsearch/data/nodes/0/node.lock,impl=sun.nio.ch.FileLockImpl[0:9223372036854775807 exclusive valid],creationTime=2020-02-05T20:57:13.011548Z))",
"at org.elasticsearch.bootstrap.Elasticsearch.init(Elasticsearch.java:163) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.bootstrap.Elasticsearch.execute(Elasticsearch.java:150) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.cli.EnvironmentAwareCommand.execute(EnvironmentAwareCommand.java:86) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.cli.Command.mainWithoutErrorHandling(Command.java:125) ~[elasticsearch-cli-7.5.2.jar:7.5.2]",
"at org.elasticsearch.cli.Command.main(Command.java:90) ~[elasticsearch-cli-7.5.2.jar:7.5.2]",
"at org.elasticsearch.bootstrap.Elasticsearch.main(Elasticsearch.java:115) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.bootstrap.Elasticsearch.main(Elasticsearch.java:92) ~[elasticsearch-7.5.2.jar:7.5.2]",
"Caused by: org.apache.lucene.store.AlreadyClosedException: Underlying file changed by an external force at 1970-01-01T00:00:00Z, (lock=NativeFSLock(path=/usr/share/elasticsearch/data/nodes/0/node.lock,impl=sun.nio.ch.FileLockImpl[0:9223372036854775807 exclusive valid],creationTime=2020-02-05T20:57:13.011548Z))",
"at org.apache.lucene.store.NativeFSLockFactory$NativeFSLock.ensureValid(NativeFSLockFactory.java:191) ~[lucene-core-8.3.0.jar:8.3.0 2aa586909b911e66e1d8863aa89f173d69f86cd2 - ishan - 2019-10-25 23:10:03]",
"at org.elasticsearch.env.NodeEnvironment.assertEnvIsLocked(NodeEnvironment.java:1039) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.env.NodeEnvironment.nodeDataPaths(NodeEnvironment.java:789) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.env.NodeEnvironment.assertCanWrite(NodeEnvironment.java:1218) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.env.NodeEnvironment.<init>(NodeEnvironment.java:314) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.node.Node.<init>(Node.java:273) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.node.Node.<init>(Node.java:253) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.bootstrap.Bootstrap$5.<init>(Bootstrap.java:241) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.bootstrap.Bootstrap.setup(Bootstrap.java:241) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.bootstrap.Bootstrap.init(Bootstrap.java:369) ~[elasticsearch-7.5.2.jar:7.5.2]",
"at org.elasticsearch.bootstrap.Elasticsearch.init(Elasticsearch.java:159) ~[elasticsearch-7.5.2.jar:7.5.2]",
"... 6 more"] }
And this StackOverflow thread basically says it's a problem with a process holding the lock, but the guy doesn't seem to have been using docker: enter link description here
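For what it's worth, one way to look at the lock file the exception complains about is to check it both inside the running container and on the host side of the bind mount (the container path is taken straight from the stack trace, the host path follows from the ./esdata volume in the compose file, and the container name from the docker ps output in edit 1 below):

C:\Dockers\megalog-try-1>docker exec megalog-try-1_elasticsearch_1 ls -l /usr/share/elasticsearch/data/nodes/0/node.lock
C:\Dockers\megalog-try-1>dir esdata\nodes\0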
In case it matters, my docker-compose version:
C:\Dockers\megalog-try-1>docker-compose version
docker-compose version 1.25.2, build 698e2846
docker-py version: 4.1.0
CPython version: 2.7.16
OpenSSL version: OpenSSL 1.0.2q 20 Nov 2018
*** edit 1
C:\Dockers\megalog-try-1>docker ps
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
1d7af59ed31a docker.elastic.co/logstash/logstash:7.5.2 "/usr/local/bin/dock…" 16 hours ago Up 21 minutes 0.0.0.0:7777->7777/tcp, 5044/tcp, 0.0.0.0:9600->9600/tcp megalog-try-1_logstash_1
80af72445bfb confluentinc/cp-kafka:latest "/etc/confluent/dock…" 16 hours ago Up 21 minutes 0.0.0.0:9092->9092/tcp megalog-try-1_kafka1_1
e5d209fe2c42 confluentinc/cp-kafka:latest "/etc/confluent/dock…" 16 hours ago Up 21 minutes 0.0.0.0:9094->9092/tcp megalog-try-1_kafka3_1
c00871259fab confluentinc/cp-kafka:latest "/etc/confluent/dock…" 16 hours ago Up 21 minutes 0.0.0.0:9093->9092/tcp megalog-try-1_kafka2_1
9f4dccedbe17 docker.elastic.co/kibana/kibana:7.5.2 "/usr/local/bin/dumb…" 16 hours ago Up 21 minutes 0.0.0.0:5601->5601/tcp megalog-try-1_kibana_1
d240b6961d78 confluentinc/cp-zookeeper:latest "/etc/confluent/dock…" 16 hours ago Up 21 minutes 2888/tcp, 3888/tcp, 0.0.0.0:2183->2181/tcp megalog-try-1_zoo3_1
63bd8906ca83 confluentinc/cp-zookeeper:latest "/etc/confluent/dock…" 16 hours ago Up 21 minutes 2888/tcp, 0.0.0.0:2181->2181/tcp, 3888/tcp megalog-try-1_zoo1_1
3218d615cb19 confluentinc/cp-zookeeper:latest "/etc/confluent/dock…" 16 hours ago Up 21 minutes 2888/tcp, 3888/tcp, 0.0.0.0:2182->2181/tcp megalog-try-1_zoo2_1
13737ff80b03 docker.elastic.co/elasticsearch/elasticsearch:7.5.2 "/usr/local/bin/dock…" 16 hours ago Up 21 minutes 0.0.0.0:9200->9200/tcp, 9300/tcp megalog-try-1_elasticsearch_1