$PWD/fluentd
--> logs
--> Dockerfile
--> fluent.conf
$PWD/docker-compose.yml
Build, create, and start the services defined in the docker-compose.yml file:
docker-compose up --build
stops and removes containers, networks, and volumes that were created by docker-compose up.
docker-compose down -v --remove-orphans
docker-compose up --build
stops and removes containers, networks, and volumes that were created by docker-compose up.
docker-compose down -v --remove-orphans
docker-compose.yml
# Compose file for a Fluentd -> Kafka log pipeline.
# (Indentation reconstructed: services are nested under `services:`,
# per-service keys under each service, and `networks:` is top-level.)
version: '3.8'

services:
  # Fluentd tails local log files and forwards them to Kafka.
  fluentd:
    build:
      context: ./fluentd
    networks:
      - custom-net
    container_name: fluentd
    volumes:
      # Mount the pipeline config and the host log directory into the container.
      - ./fluentd/fluent.conf:/fluentd/etc/fluent.conf
      - ./fluentd/logs:/var/log/fluentd
    depends_on:
      - kafka

  # Single-node Kafka broker running in KRaft mode (no ZooKeeper).
  kafka:
    image: confluentinc/cp-kafka:latest
    hostname: kafka
    container_name: kafka
    networks:
      - custom-net
    ports:
      - "9092:9092"   # broker listener
      - "9093:9093"   # controller listener
    environment:
      KAFKA_KRAFT_MODE: "true"                            # This enables KRaft mode in Kafka.
      KAFKA_PROCESS_ROLES: controller,broker              # Kafka acts as both broker and controller.
      KAFKA_NODE_ID: 1                                    # A unique ID for this Kafka instance.
      KAFKA_CONTROLLER_QUORUM_VOTERS: "1@localhost:9093"  # Defines the controller voters.
      KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092,CONTROLLER://0.0.0.0:9093
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
      KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
      # Advertised as "kafka" so other containers on the network (fluentd) can reach it.
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
      #KAFKA_LOG_DIRS: /var/lib/kafka/data                # Where Kafka stores its logs.
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"             # Kafka will automatically create topics if needed.
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1           # Since we’re running one broker, one replica is enough.
      KAFKA_LOG_RETENTION_HOURS: 168                      # Keep logs for 7 days.
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0           # No delay for consumer rebalancing.
      CLUSTER_ID: "Mk3OEYBSD34fcwNTJENDM2Qk"

networks:
  custom-net:
    name: fluent-net
Dockerfile
# Fluentd image with the Kafka output plugin (fluent-plugin-kafka) installed.
# Base image is Alpine-based, hence the use of apk below.
FROM fluent/fluentd:v1.16-1
# Root is needed for apk/gem installs; we drop back to the unprivileged
# "fluent" user at the end.
USER root
# Install Kafka plugin
# Simple install (kept for reference) — fails on Alpine because the gem's
# native extensions need a compiler toolchain:
#RUN gem install fluent-plugin-kafka --no-document
# Install build toolchain, build the gem's native extensions, then remove
# the toolchain in the SAME layer so it does not bloat the final image.
RUN apk add --no-cache --update \
build-base \
ruby-dev \
libffi-dev \
&& gem install fluent-plugin-kafka --no-document \
&& apk del build-base ruby-dev libffi-dev
# Run Fluentd as the non-root user shipped with the base image.
USER fluent
fluent.conf
# Tail /var/log/fluentd/input.log (mounted from ./fluentd/logs on the host)
# and forward each parsed event to Kafka topic "test-topic".
<source>
@type tail
path /var/log/fluentd/input.log
# Position file lets Fluentd resume from where it left off after a restart.
pos_file /var/log/fluentd/input.pos
tag app.log
# Read existing content on startup, not only new appends.
read_from_head true
<parse>
# Multiline parser: a new record starts on a line beginning with a date
# (YYYY-M-D); continuation lines (e.g. stack traces) are folded into it.
@type multiline
format_firstline /\d{4}-\d{1,2}-\d{1,2}/
# Captures the leading "YYYY-M-D H:M:S,ms" timestamp into `time` and the
# rest of the record into `message`.
format1 /^(?<time>\d{4}-\d{1,2}-\d{1,2} \d{1,2}:\d{1,2}:\d{1,2},\d{1,3}) (?<message>.*)/
</parse>
</source>
# Ship events tagged app.log to Kafka via the kafka2 output plugin.
<match app.log>
@type kafka2
# "kafka:9092" resolves via the Compose network (KAFKA_ADVERTISED_LISTENERS).
brokers kafka:9092
default_topic test-topic
# NOTE(review): `output_data_type` is a legacy option of the old `kafka`
# output type; for kafka2 the <format> section below controls serialization,
# so this line looks redundant — confirm and consider removing.
output_data_type json
<format>
@type json
</format>
# Buffer chunked by topic, held in memory, flushed to Kafka every 5 seconds.
# (Memory buffering means unflushed events are lost if the container dies.)
<buffer topic>
@type memory
flush_interval 5s
</buffer>
</match>
Log in to the Kafka container shell:
docker exec -it kafka bash
Publish and consume messages
kafka-topics --bootstrap-server localhost:9092 --create --topic test-topic
kafka-console-producer --bootstrap-server localhost:9092 --topic test-topic
kafka-console-consumer --bootstrap-server localhost:9092 --topic test-topic --from-beginning
Log in to the Fluentd container shell:
docker exec -it fluentd sh
No comments:
Post a Comment