Skip to content

Vector: basic server setup (Elasticsearch + Kibana + Vector aggregator)

docker-compose.yml

services:
  # Single-node Elasticsearch backing store for Kibana and the Vector sink.
  elastic:
    image: elasticsearch:${ELK_VERSION}
    container_name: elastic
    restart: unless-stopped
    ports:
      # Quoted port mapping (Compose best practice, consistent with the
      # vector service below).
      - "${ES_PORT}:9200"
    volumes:
      - /etc/localtime:/etc/localtime:ro
      # Named volume so index data survives container recreation.
      - es_data:/usr/share/elasticsearch/data
      - ./etc/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
    environment:
      discovery.type: single-node
      ES_JAVA_OPTS: ${ES_JAVA_OPTS}
      logger.level: ${ES_LOGGER_LEVEL:-INFO}
      # NOTE(review): the official image only acts on ELASTIC_PASSWORD;
      # ELASTIC_USERNAME is presumably consumed by ./etc/elasticsearch.yml —
      # confirm it is actually read.
      ELASTIC_USERNAME: ${ELASTIC_USERNAME}
      ELASTIC_PASSWORD: ${ELASTIC_PASSWORD}
    healthcheck:
      # NOTE(review): curl without -f treats HTTP 401/503 as success, so this
      # only verifies the port answers — not that the cluster is usable.
      test: curl -s ${ELASTICSEARCH_HOSTS:-http://localhost:9200} >/dev/null 2>&1 || exit 1
      interval: 10s
      timeout: 10s
      retries: 60
  # Kibana UI; starts only after Elasticsearch reports healthy.
  kibana:
    image: kibana:${ELK_VERSION}
    container_name: kibana
    restart: unless-stopped
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - ./etc/kibana.yml:/usr/share/kibana/config/kibana.yml:ro
    ports:
      # Quoted port mapping (Compose best practice, consistent with the
      # vector service below).
      - "${KB_PORT}:5601"
    environment:
      ELASTICSEARCH_HOSTS: "${ELASTICSEARCH_HOSTS:-http://elastic:9200}"
      # Encryption key for Kibana saved objects.
      # NOTE(review): Kibana requires this key to be at least 32 characters —
      # verify the value supplied via .env.
      XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: "${ELASTICSEARCH_ENCRYPTIONKEY}"
      KBN_LOGGING_ROOT_LEVEL: "info"
    depends_on:
      elastic:
        condition: service_healthy
  # Vector aggregator: receives events from remote Vector agents and ships
  # them to Elasticsearch (pipeline defined in ./etc/vector.yaml).
  vector:
    image: timberio/vector:${VECTOR_VERSION}
    container_name: vector
    restart: unless-stopped
    environment:
      LOGSTASH_USERNAME: ${LOGSTASH_USERNAME}
      LOGSTASH_PASSWORD: ${LOGSTASH_PASSWORD}
      VECTOR_LOG: "info"
    ports:
      # BUGFIX: the vector_input source in vector.yaml binds 0.0.0.0:5045,
      # but the original mapping published container port 5044, where nothing
      # listens — published traffic could never reach the source.
      - "${LOGSTASH_PORT}:5045"
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - ./etc/vector.yaml:/etc/vector/vector.yaml:ro
    depends_on:
      elastic:
        condition: service_healthy
# NOTE(review): no service above declares a `networks:` section, so all
# services join the Compose default network and this `elk` network appears
# unused — confirm whether services were meant to attach to it.
networks:
  elk:
    driver: bridge

volumes:
  # Named volume persisting Elasticsearch data across container recreation.
  es_data:
    driver: local

vector.yaml

sources:
  # Ingest point for downstream Vector agents using the native `vector`
  # protocol. The container port published in docker-compose must match the
  # port in this address for traffic to arrive.
  vector_input:
    type: vector
    address: 0.0.0.0:5045

transforms:
  # If the event's `message` field holds a JSON string, lift its keys onto
  # the event root and drop the original string; events whose `message` is
  # absent or not valid JSON pass through unchanged.
  parse_message:
    type: remap
    inputs: ["vector_input"]
    source: |
      if exists(.message) {
        parsed, err = parse_json(.message)
        if err == null {
          . = merge!(. , parsed);
          del(.message)
        }
      }

  # Seed a fallback index name; combine_index_name below overrides or
  # prefixes it when Docker labels are present.
  default_fields:
    type: remap
    inputs: ["parse_message"]
    source: |
      .metadata_index_name = "vector"

  # Derive the target index name from Docker Compose labels when present,
  # building up "<env>-<project>-<service>"; missing labels leave the
  # upstream default in place. Also copies the `env` label onto the event.
  combine_index_name:
    type: remap
    inputs: ["default_fields"]
    source: |
      if exists(.label."com.docker.compose.service") {
        .metadata_index_name = to_string!(.label."com.docker.compose.service")
      }

      if exists(.label."com.docker.compose.project") {
        .metadata_index_name = to_string!(.label."com.docker.compose.project") + "-"+ to_string!(.metadata_index_name)
      }

      if exists(.label."env") {
        .metadata_index_name = to_string!(.label."env") + "-" + to_string!(.metadata_index_name)
        .env = .label."env"
      }

  # Rename `timestamp` to the Elasticsearch-conventional `@timestamp`,
  # parsing it as RFC 3339 / ISO 8601 ("%+") along the way.
  # BUGFIX: VRL path segments containing "@" must be quoted — a bare
  # `.@timestamp` does not parse.
  combine_timestamp:
    type: remap
    inputs: ["combine_index_name"]
    source: |
      if exists(.timestamp) {
        ."@timestamp" = parse_timestamp!(.timestamp, format: "%+");
        del(.timestamp)
      }

  # Strip container/transport metadata before the event is shipped.
  # NOTE(review): `.timeStamp` (camelCase) is a different field from the
  # `.timestamp` handled upstream — confirm a source actually emits this
  # exact spelling.
  output_data:
    type: remap
    inputs: ["combine_timestamp"]
    source: |
      del(.label) # docker labels
      del(.container_id)
      del(.container_created_at)
      del(.timeStamp)
      del(.source_type)
      del(.stream)
      if exists(.container_name) {
        del(.host)
      }

sinks:
  # Uncomment to dump fully-transformed events to stdout for debugging.
  #debug_console:
  #  type: console
  #  inputs: ["output_data"]
  #  encoding:
  #    codec: json

  elastic:
    type: elasticsearch
    inputs: ["output_data"]
    endpoints: ["http://elastic:9200"]
    auth:
      strategy: basic
      user: ${LOGSTASH_USERNAME}
      password: ${LOGSTASH_PASSWORD}
    bulk:
      action: index
      # BUGFIX: Vector template syntax references event fields WITHOUT a
      # leading dot; "{{ .metadata_index_name }}" would not resolve.
      # strftime specifiers yield one index per month.
      index: "{{ metadata_index_name }}.%Y-%m"
    mode: bulk
    encoding:
      # Internal routing fields are stripped from the stored documents.
      except_fields:
        - metadata_processing
        - metadata_index_name

    buffer:
      # In-memory buffer; applies backpressure to the source when full.
      type: memory
      max_events: 10000
      when_full: block
Agent configuration: vector-agent-docker (defined in a separate file, not shown here)