Skip to content

Commit

Permalink
v2.2.1
Browse files Browse the repository at this point in the history
  • Loading branch information
Tynab committed Apr 13, 2023
1 parent bfa284d commit d1f34e8
Show file tree
Hide file tree
Showing 66 changed files with 2,225 additions and 49 deletions.
52 changes: 52 additions & 0 deletions .env
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# Version of the Elastic Stack images built by docker-compose.yml.
ELASTIC_VERSION=8.6.2

## Passwords for stack users
#
# NOTE(review): every account below shares the same weak demo password;
# rotate each credential individually before any non-local deployment.
#
# NOTE(review): whether the surrounding single quotes become part of the
# value depends on the .env parser (Compose v1 keeps them, Compose v2
# strips them) — confirm against the Compose version in use.

# User 'elastic' (built-in)
#
# Superuser role, full access to cluster management and data indices.
# https://www.elastic.co/guide/en/elasticsearch/reference/current/built-in-users.html
ELASTIC_PASSWORD='admin123@'

# User 'logstash_internal' (custom)
#
# The user Logstash uses to connect and send data to Elasticsearch.
# https://www.elastic.co/guide/en/logstash/current/ls-security.html
LOGSTASH_INTERNAL_PASSWORD='admin123@'

# User 'kibana_system' (built-in)
#
# The user Kibana uses to connect and communicate with Elasticsearch.
# https://www.elastic.co/guide/en/elasticsearch/reference/current/built-in-users.html
KIBANA_SYSTEM_PASSWORD='admin123@'

# Users 'metricbeat_internal', 'filebeat_internal' and 'heartbeat_internal' (custom)
#
# The users Beats use to connect and send data to Elasticsearch.
# https://www.elastic.co/guide/en/beats/metricbeat/current/feature-roles.html
METRICBEAT_INTERNAL_PASSWORD='admin123@'
FILEBEAT_INTERNAL_PASSWORD='admin123@'
HEARTBEAT_INTERNAL_PASSWORD='admin123@'

# User 'monitoring_internal' (custom)
#
# The user Metricbeat uses to collect monitoring data from stack components.
# https://www.elastic.co/guide/en/elasticsearch/reference/current/how-monitoring-works.html
MONITORING_INTERNAL_PASSWORD='admin123@'

# User 'beats_system' (built-in)
#
# The user the Beats use when storing monitoring information in Elasticsearch.
# https://www.elastic.co/guide/en/elasticsearch/reference/current/built-in-users.html
BEATS_SYSTEM_PASSWORD='admin123@'

# RabbitMQ
#
# Default credentials for the rabbitmq service in docker-compose.yml.
RABBITMQ_DEFAULT_USER='rabbit'
RABBITMQ_DEFAULT_PASS='admin123@'

# Kafka
#
# NOTE(review): with the PLAINTEXT listeners configured in docker-compose.yml
# these SASL credentials appear unused — confirm before relying on them.
KAFKA_CLIENT_USERS='kafka'
KAFKA_CLIENT_PASSWORDS='admin123@'
58 changes: 58 additions & 0 deletions docker-compose.dcproj
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,68 @@
<DockerServiceName>yanlib.httpapi.host</DockerServiceName>
</PropertyGroup>
  <ItemGroup>
    <!-- Non-buildable content files listed here only so they appear in the
         IDE's Solution Explorer; none of them participate in compilation. -->
    <None Include=".env" />
    <None Include="docker-compose.override.yml">
      <!-- Nest the override file under docker-compose.yml in the file tree. -->
      <DependentUpon>docker-compose.yml</DependentUpon>
    </None>
    <None Include="docker-compose.yml" />
    <None Include=".dockerignore" />
    <!-- Core ELK stack build contexts and configuration. -->
    <None Include="elasticsearch\.dockerignore" />
    <None Include="elasticsearch\config\elasticsearch.yml" />
    <None Include="elasticsearch\Dockerfile" />
    <!-- Optional extensions; each ships its own compose file and README. -->
    <None Include="extensions\curator\.dockerignore" />
    <None Include="extensions\curator\config\curator.yml" />
    <None Include="extensions\curator\config\delete_log_files_curator.yml" />
    <None Include="extensions\curator\curator-compose.yml" />
    <None Include="extensions\curator\Dockerfile" />
    <None Include="extensions\curator\README.md" />
    <None Include="extensions\enterprise-search\.dockerignore" />
    <None Include="extensions\enterprise-search\config\enterprise-search.yml" />
    <None Include="extensions\enterprise-search\Dockerfile" />
    <None Include="extensions\enterprise-search\enterprise-search-compose.yml" />
    <None Include="extensions\enterprise-search\README.md" />
    <None Include="extensions\filebeat\.dockerignore" />
    <None Include="extensions\filebeat\config\filebeat.yml" />
    <None Include="extensions\filebeat\Dockerfile" />
    <None Include="extensions\filebeat\filebeat-compose.yml" />
    <None Include="extensions\filebeat\README.md" />
    <None Include="extensions\fleet\.dockerignore" />
    <None Include="extensions\fleet\agent-apmserver-compose.yml" />
    <None Include="extensions\fleet\Dockerfile" />
    <None Include="extensions\fleet\fleet-compose.yml" />
    <None Include="extensions\fleet\README.md" />
    <None Include="extensions\heartbeat\.dockerignore" />
    <None Include="extensions\heartbeat\config\heartbeat.yml" />
    <None Include="extensions\heartbeat\Dockerfile" />
    <None Include="extensions\heartbeat\heartbeat-compose.yml" />
    <None Include="extensions\heartbeat\README.md" />
    <None Include="extensions\logspout\.dockerignore" />
    <None Include="extensions\logspout\build.sh" />
    <None Include="extensions\logspout\Dockerfile" />
    <None Include="extensions\logspout\logspout-compose.yml" />
    <None Include="extensions\logspout\modules.go" />
    <None Include="extensions\logspout\README.md" />
    <None Include="extensions\metricbeat\.dockerignore" />
    <None Include="extensions\metricbeat\config\metricbeat.yml" />
    <None Include="extensions\metricbeat\Dockerfile" />
    <None Include="extensions\metricbeat\metricbeat-compose.yml" />
    <None Include="extensions\metricbeat\README.md" />
    <None Include="extensions\README.md" />
    <None Include="kibana\.dockerignore" />
    <None Include="kibana\config\kibana.yml" />
    <None Include="kibana\Dockerfile" />
    <None Include="logstash\.dockerignore" />
    <None Include="logstash\config\logstash.yml" />
    <None Include="logstash\Dockerfile" />
    <None Include="logstash\pipeline\logstash.conf" />
    <!-- One-shot setup container assets (user/role bootstrap). -->
    <None Include="setup\.dockerignore" />
    <None Include="setup\.gitignore" />
    <None Include="setup\Dockerfile" />
    <None Include="setup\entrypoint.sh" />
    <None Include="setup\lib.sh" />
    <None Include="setup\roles\filebeat_writer.json" />
    <None Include="setup\roles\heartbeat_writer.json" />
    <None Include="setup\roles\logstash_writer.json" />
    <None Include="setup\roles\metricbeat_writer.json" />
  </ItemGroup>
</Project>
170 changes: 170 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,173 @@ services:
build:
context: .
dockerfile: host/YANLib.HttpApi.Host/Dockerfile

  # One-shot container that bootstraps Elasticsearch users and roles, then exits.
  setup:
    build:
      context: setup/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION}
    # Run an init process as PID 1 so the short-lived entrypoint is reaped cleanly.
    init: true
    container_name: setup
    volumes:
      - ./setup/entrypoint.sh:/entrypoint.sh:ro,Z
      - ./setup/lib.sh:/lib.sh:ro,Z
      - ./setup/roles:/roles:ro,Z
      # Named volume keeps a state marker so the bootstrap is not repeated.
      - setup:/state:Z
    environment:
      # Credentials for every stack user; values are supplied by .env.
      ELASTIC_PASSWORD: ${ELASTIC_PASSWORD:-}
      LOGSTASH_INTERNAL_PASSWORD: ${LOGSTASH_INTERNAL_PASSWORD:-}
      KIBANA_SYSTEM_PASSWORD: ${KIBANA_SYSTEM_PASSWORD:-}
      METRICBEAT_INTERNAL_PASSWORD: ${METRICBEAT_INTERNAL_PASSWORD:-}
      FILEBEAT_INTERNAL_PASSWORD: ${FILEBEAT_INTERNAL_PASSWORD:-}
      HEARTBEAT_INTERNAL_PASSWORD: ${HEARTBEAT_INTERNAL_PASSWORD:-}
      MONITORING_INTERNAL_PASSWORD: ${MONITORING_INTERNAL_PASSWORD:-}
      BEATS_SYSTEM_PASSWORD: ${BEATS_SYSTEM_PASSWORD:-}
      RABBITMQ_DEFAULT_USER: ${RABBITMQ_DEFAULT_USER:-}
      RABBITMQ_DEFAULT_PASS: ${RABBITMQ_DEFAULT_PASS:-}
      KAFKA_CLIENT_USERS: ${KAFKA_CLIENT_USERS:-}
      KAFKA_CLIENT_PASSWORDS: ${KAFKA_CLIENT_PASSWORDS:-}
    networks:
      - demo
    depends_on:
      - elasticsearch

  elasticsearch:
    build:
      context: elasticsearch/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION}
    container_name: elasticsearch
    volumes:
      - ./elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro,Z
      # Named volume so index data survives container re-creation.
      - elasticsearch:/usr/share/elasticsearch/data:Z
    ports:
      - 9200:9200  # HTTP API
      - 9300:9300  # transport (node-to-node)
    environment:
      # Small fixed JVM heap, sized for local development.
      ES_JAVA_OPTS: -Xms512m -Xmx512m
      # Bootstrap password.
      # Used to initialize the keystore during the initial startup of
      # Elasticsearch. Ignored on subsequent runs.
      ELASTIC_PASSWORD: ${ELASTIC_PASSWORD:-}
      # Use single node discovery in order to disable production mode and avoid bootstrap checks.
      # see: https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
    networks:
      - demo
    restart: unless-stopped

  # Logstash: ingests the host application's log files and forwards events
  # to Elasticsearch (pipeline defined in logstash/pipeline/).
  logstash:
    build:
      context: logstash/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION}
    container_name: logstash
    volumes:
      - ./logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml:ro,Z
      - ./logstash/pipeline:/usr/share/logstash/pipeline:ro,Z
      # Read-only mount of the .NET host's log directory so the pipeline can tail it.
      - ./host/YANLib.HttpApi.Host/Logs:/usr/share/logstash/logs:ro,Z
    ports:
      - 9600:9600        # monitoring API
      - 5044:5044        # presumably the Beats input — confirm against logstash.conf
      - 50000:50000/tcp  # custom TCP input
      - 50000:50000/udp  # custom UDP input
    environment:
      LS_JAVA_OPTS: -Xms256m -Xmx256m
      LOGSTASH_INTERNAL_PASSWORD: ${LOGSTASH_INTERNAL_PASSWORD:-}
    networks:
      - demo
    depends_on:
      - elasticsearch
    restart: unless-stopped

  # Kibana web UI, reachable on http://localhost:5601.
  kibana:
    build:
      context: kibana/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION}
    container_name: kibana
    volumes:
      - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml:ro,Z
    ports:
      - 5601:5601
    environment:
      # Password for the built-in kibana_system user (set by the setup service).
      KIBANA_SYSTEM_PASSWORD: ${KIBANA_SYSTEM_PASSWORD:-}
    networks:
      - demo
    depends_on:
      - elasticsearch
    restart: unless-stopped

rabbitmq:
image: rabbitmq:3-management
container_name: rabbitmq
ports:
- 5672:5672
- 15672:15672
environment:
RABBITMQ_DEFAULT_USER: ${RABBITMQ_DEFAULT_USER:-}
RABBITMQ_DEFAULT_PASS: ${RABBITMQ_DEFAULT_PASS:-}
networks:
- demo
depends_on:
- logstash
restart: unless-stopped

zookeeper:
image: bitnami/zookeeper:latest
container_name: zookeeper
ports:
- 2181:2181
environment:
- ALLOW_ANONYMOUS_LOGIN=yes
networks:
- demo
depends_on:
- logstash
restart: unless-stopped

kafka:
image: bitnami/kafka:latest
container_name: kafka
ports:
- 9092:9092
- 9093:9093
environment:
KAFKA_BROKER_ID: 1
KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: CLIENT:PLAINTEXT,EXTERNAL:PLAINTEXT
KAFKA_CFG_LISTENERS: CLIENT://:9093,EXTERNAL://:9092
KAFKA_CFG_ADVERTISED_LISTENERS: CLIENT://kafka:9093,EXTERNAL://localhost:9092
KAFKA_CFG_INTER_BROKER_LISTENER_NAME: CLIENT
ALLOW_PLAINTEXT_LISTENER: yes
KAFKA_CLIENT_USERS: ${KAFKA_CLIENT_USERS:-}
KAFKA_CLIENT_PASSWORDS: ${KAFKA_CLIENT_PASSWORDS:-}
networks:
- demo
depends_on:
- zookeeper
restart: unless-stopped

kafka-ui:
image: provectuslabs/kafka-ui:latest
container_name: kafka-ui
ports:
- 8080:8080
environment:
- KAFKA_CLUSTERS_0_NAME=local
- KAFKA_CLUSTERS_0_ZOOKEEPER=zookeeper:2181
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9093
networks:
- demo
depends_on:
- zookeeper
- kafka
restart: unless-stopped

# Single bridge network shared by every service in this stack.
networks:
  demo:
    driver: bridge

# Named volumes: setup state marker and Elasticsearch index data.
volumes:
  setup:
  elasticsearch:
6 changes: 6 additions & 0 deletions elasticsearch/.dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Ignore Docker build files
# (keeps them out of the build context so edits to them don't bust the cache)
Dockerfile
.dockerignore

# Ignore OS artifacts
**/.DS_Store
7 changes: 7 additions & 0 deletions elasticsearch/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Stack version passed from docker-compose build args; an ARG declared before
# FROM is only in scope for the FROM line itself.
ARG ELASTIC_VERSION

# https://www.docker.elastic.co/
FROM docker.elastic.co/elasticsearch/elasticsearch:${ELASTIC_VERSION}

# Add your elasticsearch plugins setup here
# Example: RUN elasticsearch-plugin install analysis-icu
20 changes: 20 additions & 0 deletions elasticsearch/config/elasticsearch.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
---
## Default Elasticsearch configuration from Elasticsearch base image.
## https://github.com/elastic/elasticsearch/blob/main/distribution/docker/src/docker/config/elasticsearch.yml
#
cluster.name: docker-cluster
# Listen on all interfaces so the published container port is reachable.
network.host: 0.0.0.0

node.name: elasticsearch

# Single-node discovery disables production mode and its bootstrap checks.
discovery.type: single-node

## X-Pack settings
## see https://www.elastic.co/guide/en/elasticsearch/reference/current/security-settings.html
#
# NOTE(review): a self-generated trial license expires after 30 days;
# switch to `basic` if only free-tier features are needed.
xpack.license.self_generated.type: trial
xpack.security.enabled: true

## Built-in users' passwords.
# On 8.x, reset a built-in user's password with:
#   bin/elasticsearch-reset-password -u <username>
# (the interactive elasticsearch-setup-passwords tool was removed in 8.0).
3 changes: 3 additions & 0 deletions extensions/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Extensions

Third-party extensions that enable extra integrations with the Elastic stack.
6 changes: 6 additions & 0 deletions extensions/curator/.dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Ignore Docker build files
Dockerfile
.dockerignore

# Ignore OS artifacts
**/.DS_Store
9 changes: 9 additions & 0 deletions extensions/curator/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
FROM untergeek/curator:8.0.2

# crond needs root to read the spool directory and switch to 'nobody'.
USER root

# Append a crontab entry running the delete-logs action every minute.
RUN >>/var/spool/cron/crontabs/nobody \
    echo '* * * * * /curator/curator /.curator/delete_log_files_curator.yml'

# Run cron in the foreground (-f) with max debug logging (-d8) as PID 1.
ENTRYPOINT ["crond"]
CMD ["-f", "-d8"]
20 changes: 20 additions & 0 deletions extensions/curator/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Curator

Elasticsearch Curator helps you curate or manage your indices.

## Usage

If you want to include the Curator extension, run Docker Compose from the root of the repository with an additional
command line argument referencing the `curator-compose.yml` file:

```bash
$ docker-compose -f docker-compose.yml -f extensions/curator/curator-compose.yml up
```

This sample setup demonstrates how to run `curator` every minute using `cron`.

All configuration files are available in the `config/` directory.

## Documentation

[Curator Reference](https://www.elastic.co/guide/en/elasticsearch/client/curator/current/index.html)
13 changes: 13 additions & 0 deletions extensions/curator/config/curator.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Curator configuration
# https://www.elastic.co/guide/en/elasticsearch/client/curator/current/configfile.html

elasticsearch:
  client:
    # Service name of the Elasticsearch container on the compose network.
    hosts: [ http://elasticsearch:9200 ]
  other_settings:
    username: elastic
    # NOTE(review): relies on Curator expanding ${ELASTIC_PASSWORD} from the
    # container environment — confirm this works in curator 8.0.2.
    password: ${ELASTIC_PASSWORD}

logging:
  loglevel: INFO
  logformat: default
Loading

0 comments on commit d1f34e8

Please sign in to comment.