Update logstash container image and remove ELK stack (#15744)

* Migrate to new image for logstash container

* Remove ELK stack tooling I will not maintain
Alan Rominger 2025-01-15 07:43:38 -05:00 committed by GitHub
parent f89be5ec8b
commit c45eb43d63
12 changed files with 1 addition and 331 deletions


@@ -589,24 +589,9 @@ docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
docker-refresh: docker-clean docker-compose

## Docker Development Environment with Elastic Stack Connected
docker-compose-elk: awx/projects docker-compose-sources
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

docker-compose-cluster-elk: awx/projects docker-compose-sources
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

docker-compose-container-group:
	MINIKUBE_CONTAINER_GROUP=true $(MAKE) docker-compose

clean-elk:
	docker stop tools_kibana_1
	docker stop tools_logstash_1
	docker stop tools_elasticsearch_1
	docker rm tools_logstash_1
	docker rm tools_elasticsearch_1
	docker rm tools_kibana_1

VERSION:
	@echo "awx: $(VERSION)"


@@ -1,4 +1,4 @@
-FROM logstash:5-alpine
+FROM mirror.gcr.io/library/logstash:5-alpine
COPY logstash.conf /
RUN touch /logstash.log
RUN chown logstash:logstash /logstash.log


@@ -1,121 +0,0 @@
# Docker ELK / Elastic Stack Development Tools
These are tools to run a containerized version of the ELK stack, comprising
Logstash, Elasticsearch, and Kibana.
A copy of the license is in `licenses/docker-elk.txt`
## Instructions
Due to complex requirements from the Elasticsearch container upstream, there
is a prerequisite to get the containers running. The Docker _host_ machine
must have the `max_map_count` variable increased. For a developer using
docker-machine with something like VirtualBox or VMware, this can be
done by running the command over SSH in the running Docker machine. Example:
```bash
docker-machine ssh default sudo sysctl -w vm.max_map_count=262144
```
> Note: If you are running Docker natively on Linux, you need only run `sysctl -w vm.max_map_count=262144`.
After this, the containers can be started up with commands like:
```bash
make docker-compose-elk
```
or
```
make docker-compose-cluster-elk
```
These are run from the root folder of the ansible-tower repository.
Kibana is the visualization service, and it can be accessed in a web browser
by going to `{server address}:5601`.
### Authentication
The default HTTPS logstash configuration makes use of basic auth, so a username
and password are needed in the HTTPS configuration, in addition to the other
parameters. The following settings are supported:
```
{
  "LOG_AGGREGATOR_HOST": "logstash",
  "LOG_AGGREGATOR_PORT": 8085,
  "LOG_AGGREGATOR_TYPE": "logstash",
  "LOG_AGGREGATOR_USERNAME": "awx_logger",
  "LOG_AGGREGATOR_PASSWORD": "workflows",
  "LOG_AGGREGATOR_LOGGERS": [
    "awx",
    "activity_stream",
    "job_events",
    "system_tracking"
  ],
  "LOG_AGGREGATOR_INDIVIDUAL_FACTS": false,
  "LOG_AGGREGATOR_ENABLED": true,
  "LOG_AGGREGATOR_PROTOCOL": "https",
  "LOG_AGGREGATOR_TCP_TIMEOUT": 5
}
```
and
```
{
  "LOG_AGGREGATOR_HOST": "logstash",
  "LOG_AGGREGATOR_PORT": 8086,
  "LOG_AGGREGATOR_TYPE": "logstash",
  "LOG_AGGREGATOR_LOGGERS": [
    "awx",
    "activity_stream",
    "job_events",
    "system_tracking"
  ],
  "LOG_AGGREGATOR_INDIVIDUAL_FACTS": false,
  "LOG_AGGREGATOR_ENABLED": true,
  "LOG_AGGREGATOR_PROTOCOL": "udp",
  "LOG_AGGREGATOR_TCP_TIMEOUT": 5
}
```
and
```
{
  "LOG_AGGREGATOR_HOST": "logstash",
  "LOG_AGGREGATOR_PORT": 8087,
  "LOG_AGGREGATOR_TYPE": "logstash",
  "LOG_AGGREGATOR_LOGGERS": [
    "awx",
    "activity_stream",
    "job_events",
    "system_tracking"
  ],
  "LOG_AGGREGATOR_INDIVIDUAL_FACTS": false,
  "LOG_AGGREGATOR_ENABLED": true,
  "LOG_AGGREGATOR_PROTOCOL": "tcp",
  "LOG_AGGREGATOR_TCP_TIMEOUT": 5
}
```
These can be entered via the settings endpoint by making a POST to
`/api/v2/settings/logging/`.
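For example, the first (HTTPS) payload above could be submitted with a request along these lines. This is a sketch: the base URL and the `admin:password` credentials are placeholders for your own deployment.
```bash
# POST a logging settings payload to the settings endpoint.
# URL and credentials below are placeholders, not part of this repo.
curl -k -u admin:password \
  -X POST https://localhost:8043/api/v2/settings/logging/ \
  -H 'Content-Type: application/json' \
  -d '{
        "LOG_AGGREGATOR_HOST": "logstash",
        "LOG_AGGREGATOR_PORT": 8085,
        "LOG_AGGREGATOR_TYPE": "logstash",
        "LOG_AGGREGATOR_USERNAME": "awx_logger",
        "LOG_AGGREGATOR_PASSWORD": "workflows",
        "LOG_AGGREGATOR_LOGGERS": ["awx", "activity_stream", "job_events", "system_tracking"],
        "LOG_AGGREGATOR_INDIVIDUAL_FACTS": false,
        "LOG_AGGREGATOR_ENABLED": true,
        "LOG_AGGREGATOR_PROTOCOL": "https",
        "LOG_AGGREGATOR_TCP_TIMEOUT": 5
      }'
```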
### Connecting Logstash to 3rd Party Receivers
To send these logs to an external consumer of logstash-format
messages, replace the output section in the `logstash.conf` file.
```
output {
  elasticsearch {
    hosts => "elasticsearch:9200"
  }
}
```
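As an illustration, a hypothetical external receiver that accepts JSON lines over TCP could be targeted by swapping in Logstash's `tcp` output plugin instead; the host and port below are placeholders:
```
output {
  tcp {
    host => "receiver.example.com"  # placeholder third-party receiver
    port => 5044                    # placeholder port
    codec => json_lines             # emit one JSON document per line
  }
}
```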
## Changelog
- Current branch point `a776151221182dcfaec7df727459e208c895d25b`,
  Nov 18, 2016
- Original branch point `b5a4deee142b152d4f9232ebac5bbabb2d2cef3c`,
  Sep 25, 2016, before X-Pack support


@@ -1,49 +0,0 @@
---
# Structure for the Elastic Stack docker configuration came from docker-elk:
# https://github.com/deviantony/docker-elk
# docker-elk is under the MIT License,
# a copy of its license is provided in licenses/docker-elk.txt
# contents modified
version: '2'
services:
  # Components of ELK stack for logging
  elasticsearch:
    build: ../../elastic/elasticsearch/
    ports:
      - "9200:9200"
      - "9300:9300"
    environment:
      ES_JAVA_OPTS: "-Xms1g -Xmx1g"
    # networks:  # add back in when a connection to tower_tools is possible
    #   - docker_elk
  logstash:
    build: ../../elastic/logstash/
    command: -f /etc/logstash/conf.d/
    volumes:
      - ../../elastic/logstash/config:/etc/logstash/conf.d
    ports:
      - "8085:8085"
    links:
      - elasticsearch
    # networks:
    #   - docker_elk
    depends_on:
      - elasticsearch
  kibana:
    build: ../../elastic/kibana/
    volumes:
      - ../../elastic/kibana/config/:/opt/kibana/config/
    ports:
      - "5601:5601"
    links:
      - elasticsearch
    # networks:
    #   - docker_elk
    depends_on:
      - elasticsearch
# networks:
#   docker_elk:
#     driver: bridge


@@ -1,13 +0,0 @@
---
version: '2'
services:
  # AWX Development Cluster
  tower_1:
    links:
      - logstash
  tower_2:
    links:
      - logstash
  tower_3:
    links:
      - logstash


@@ -1,7 +0,0 @@
---
version: '2'
services:
  # Primary AWX Development Container
  awx:
    links:
      - logstash


@@ -1,5 +0,0 @@
FROM elasticsearch:5
ENV ES_JAVA_OPTS="-Des.path.conf=/etc/elasticsearch"
CMD ["-E", "network.host=0.0.0.0", "-E", "discovery.zen.minimum_master_nodes=1"]


@@ -1 +0,0 @@
Ensure the existence of the parent folder.


@@ -1 +0,0 @@
FROM kibana:5


@@ -1,93 +0,0 @@
---
# Kibana is served by a back end server. This setting specifies the port to use.
server.port: 5601
# This setting specifies the IP address of the back end server.
server.host: "0.0.0.0"
# Enables you to specify a path to mount Kibana at if you are running behind a proxy. This setting
# cannot end in a slash.
# server.basePath: ""
# The maximum payload size in bytes for incoming server requests.
# server.maxPayloadBytes: 1048576
# The Kibana server's name. This is used for display purposes.
# server.name: "your-hostname"
# The URL of the Elasticsearch instance to use for all your queries.
elasticsearch.url: "http://elasticsearch:9200"
# When this setting's value is true Kibana uses the hostname specified in the server.host
# setting. When the value of this setting is false, Kibana uses the hostname of the host
# that connects to this Kibana instance.
# elasticsearch.preserveHost: true
# Kibana uses an index in Elasticsearch to store saved searches, visualizations and
# dashboards. Kibana creates a new index if the index doesn't already exist.
# kibana.index: ".kibana"
# The default application to load.
# kibana.defaultAppId: "discover"
# If your Elasticsearch is protected with basic authentication, these settings provide
# the username and password that the Kibana server uses to perform maintenance on the Kibana
# index at startup. Your Kibana users still need to authenticate with Elasticsearch, which
# is proxied through the Kibana server.
# elasticsearch.username: "user"
# elasticsearch.password: "pass"
# Paths to the PEM-format SSL certificate and SSL key files, respectively. These
# files enable SSL for outgoing requests from the Kibana server to the browser.
# server.ssl.cert: /path/to/your/server.crt
# server.ssl.key: /path/to/your/server.key
# Optional settings that provide the paths to the PEM-format SSL certificate and key files.
# These files validate that your Elasticsearch backend uses the same key files.
# elasticsearch.ssl.cert: /path/to/your/client.crt
# elasticsearch.ssl.key: /path/to/your/client.key
# Optional setting that enables you to specify a path to the PEM file for the certificate
# authority for your Elasticsearch instance.
# elasticsearch.ssl.ca: /path/to/your/CA.pem
# To disregard the validity of SSL certificates, change this setting's value to false.
# elasticsearch.ssl.verify: true
# Time in milliseconds to wait for Elasticsearch to respond to pings. Defaults to the value of
# the elasticsearch.requestTimeout setting.
# elasticsearch.pingTimeout: 1500
# Time in milliseconds to wait for responses from the back end or Elasticsearch. This value
# must be a positive integer.
# elasticsearch.requestTimeout: 30000
# List of Kibana client-side headers to send to Elasticsearch. To send *no* client-side
# headers, set this value to [] (an empty list).
# elasticsearch.requestHeadersWhitelist: [ authorization ]
# Time in milliseconds for Elasticsearch to wait for responses from shards. Set to 0 to disable.
# elasticsearch.shardTimeout: 0
# Time in milliseconds to wait for Elasticsearch at Kibana startup before retrying.
# elasticsearch.startupTimeout: 5000
# Specifies the path where Kibana creates the process ID file.
# pid.file: /var/run/kibana.pid
# Enables you to specify a file where Kibana stores log output.
# logging.dest: stdout
# Set the value of this setting to true to suppress all logging output.
# logging.silent: false
# Set the value of this setting to true to suppress all logging output other than error messages.
# logging.quiet: false
# Set the value of this setting to true to log all events, including system usage information
# and all requests.
# logging.verbose: false
# Set the interval in milliseconds to sample system and process performance
# metrics. Minimum is 100ms. Defaults to 10000.
# ops.interval: 10000


@@ -1,4 +0,0 @@
FROM logstash:5
# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json


@@ -1,21 +0,0 @@
input {
  http {
    port => 8085
    user => awx_logger
    password => "workflows"
  }
}

## Add your filters / logstash plugins configuration here
filter {
  json {
    source => "message"
  }
}

output {
  elasticsearch {
    hosts => "elasticsearch:9200"
  }
}
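As a quick smoke test of the HTTP input above, a JSON event can be sent with basic auth. This is a sketch: `localhost:8085` assumes the port mapping from the compose file, and a plain-text body lands in the `message` field, where the `json` filter parses it.
```bash
# Send a test event to the logstash http input (host/port are assumptions).
curl -X POST http://localhost:8085 \
  -u awx_logger:workflows \
  -H 'Content-Type: text/plain' \
  -d '{"level": "info", "msg": "hello from curl"}'
```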