Loki logs unable to reach Tempo datasource

We’ve configured Grafana, Loki, Tempo and Promtail as Docker containers in a Compose file:
docker-compose.yaml

version: '3.9'

x-logging:
  &default-logging
  driver: "json-file"
  options:
    max-size: "1m"
    max-file: "1"
    tag: "{{.Name}}"

services:
  grafana:
    image: grafana/grafana:11.0.0-ubuntu   #grafana/grafana:10.4.2
    container_name: grafana
    ports:
      - 3000:3000
    environment:
      - GF_AUTH_ANONYMOUS_ENABLED=true
      - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
      - GF_USERS_DEFAULT_THEME=light
    volumes:
      - ./configs/grafana/datasources.yaml:/etc/grafana/provisioning/datasources/datasources.yaml:ro
    depends_on:
      - tempo
    networks:
      - traces
    logging: *default-logging

  tempo:
    image: grafana/tempo:latest #grafana/tempo:2.3.1
    container_name: tempo
    ports:
      - 3200:3200   # tempo http
      - 9095:9095   # tempo grpc
      - 14268:14268 # jaeger ingest
      - 4318:4318   # otlp http
      - 4317:4317   # otlp grpc
    volumes:
      - ./configs/tempo/tempo-local.yaml:/etc/tempo-local.yaml
    command: ["-config.file=/etc/tempo-local.yaml"]
    networks:
      - traces
    logging: *default-logging

  promtail:
    image: grafana/promtail:main #grafana/promtail:2.9.7
    container_name: promtail
    volumes:
      - ./configs/promtail/promtail.yaml:/etc/promtail/docker-config.yaml
      - /var/lib/docker/containers:/var/lib/docker/containers:ro
      - /var/run/docker.sock:/var/run/docker.sock
    command: -config.file=/etc/promtail/docker-config.yaml
    depends_on:
      - loki
    networks:
      - traces
    logging: *default-logging

  loki:
    image: grafana/loki:main #grafana/loki:2.9.7
    container_name: loki
    ports:
      - 3100:3100
    command: -config.file=/etc/loki/local-config.yaml
    networks:
      - traces
    logging: *default-logging

networks:
  traces:
    name: traces
    driver: bridge

promtail.yaml
server:
  http_listen_port: 9080
  grpc_listen_port: 0

positions:
  filename: /tmp/positions.yaml

clients:
  - url: <loki endpoint>

scrape_configs:
  - job_name: flog_scrape 
    docker_sd_configs:
      - host: unix:///var/run/docker.sock
        refresh_interval: 5s
        filters:
          - name: label
            values: ["logging=promtail"] 
    relabel_configs:
      - source_labels: ['__meta_docker_container_name']
        regex: '/(.*)'
        target_label: 'container'
      - source_labels: ['__meta_docker_container_log_stream']
        target_label: 'logstream'
      - source_labels: ['__meta_docker_container_label_logging_jobname']
        target_label: 'job'
    pipeline_stages:
      - cri: {}
      - multiline:
          # firstline: '^\d{4}-\d{2}-\d{2} \d{1,2}:\d{2}:\d{2},\d{3}'
          firstline: '(?P<time>\d{2}:\d{2}:\d{2}\.\d{3}) \[.*?\] \[traceId=(\w+), spanId=(\w+)\] \S+ \S+:\d+ \| .*'
          max_wait_time: 3s
      # https://grafana.com/docs/loki/latest/clients/promtail/stages/json/
      - json:
          expressions:
            #message: message
            level: level
            #output: 'message'
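
For reference, the <loki endpoint> placeholder above is Loki's push API URL; with the compose file above (loki service listening on 3100 on the shared traces network), it would typically look something like:

clients:
  - url: http://loki:3100/loki/api/v1/push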

tempo-local.yaml

server:
  http_listen_port: 3200

distributor:
  receivers:
    jaeger:
      protocols:
        grpc:
        thrift_compact:
        thrift_binary:
        thrift_http:
    otlp:
      protocols:
        http:
          endpoint: 0.0.0.0:4318  # match the 4318:4318 port published in docker-compose.yaml
        grpc:
          endpoint: 0.0.0.0:4317

ingester:
  trace_idle_period: 10s
  max_block_bytes: 1_000_000
  max_block_duration: 5m

compactor:
  compaction:
    block_retention: 48h
    compacted_block_retention: 1h

storage:
  trace:
    backend: local
    local:
      path: /var/tempo/traces
    block:
      bloom_filter_false_positive: 0.05
      v2_index_downsample_bytes: 1000
      v2_encoding: zstd

metrics_generator:
  registry:
    external_labels:
      source: tempo
      cluster: docker-compose
  storage:
    path: /tmp/tempo/generator/wal
    remote_write:
      - url: http://prometheus:9090/api/v1/write
        send_exemplars: true

overrides:
  defaults:
    metrics_generator:
      processors: [service-graphs, span-metrics] # enables metrics generator
        
#overrides:
#  per_tenant_override_config: /conf/overrides.yaml

Here’s also the app’s appender configuration from the logback.xml file:

  <appender name="LOKI" class="com.github.loki4j.logback.Loki4jAppender"> 
        <http>
            <url>http://localhost:3100/loki/api/v1/push</url>
        </http>
        <format>
        <label>
            <pattern>app=${appName},host=${HOSTNAME},level=%level</pattern>
        </label>
        <message>
            <pattern>%d{HH:mm:ss.SSS} [%thread] [traceId=%X{traceId}, spanId=%X{spanId}] %-5level %file:%line | %msg%n</pattern>
        </message>
            <sortByTime>true</sortByTime>
        </format>
    </appender>
 
    <root level="INFO">
        <appender-ref ref="LOKI"/>
        <appender-ref ref="STDOUT" />
    </root>
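
The %X{traceId} and %X{spanId} placeholders in that pattern only resolve if something puts those keys into the SLF4J MDC; Spring Boot’s Micrometer Tracing typically does this out of the box. As a rough manual sketch, assuming the plain OpenTelemetry API is on the classpath (the helper class name is made up):

import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.SpanContext;
import org.slf4j.MDC;

// Hypothetical helper: copy the current span's IDs into the SLF4J MDC so that
// the %X{traceId} / %X{spanId} placeholders in the logback pattern resolve.
public final class TraceMdc {
    private TraceMdc() {}

    public static void put() {
        SpanContext ctx = Span.current().getSpanContext();
        if (ctx.isValid()) {
            MDC.put("traceId", ctx.getTraceId());
            MDC.put("spanId", ctx.getSpanId());
        }
    }

    public static void clear() {
        MDC.remove("traceId");
        MDC.remove("spanId");
    }
}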

The application logs are streamed to Loki as expected; however, the log lines that contain a trace ID and span ID are not visible in the Tempo data source.
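
The provisioned datasources.yaml mounted in the compose file isn’t shown above. For logs-to-traces navigation in Grafana, the Loki data source needs a derived field that extracts the trace ID from the log line and points it at the Tempo data source. A rough sketch of what that provisioning could look like (data source names, the UID and the regex are assumptions based on the logback pattern above):

apiVersion: 1

datasources:
  - name: Tempo
    uid: tempo            # assumed UID, referenced by the derived field below
    type: tempo
    access: proxy
    url: http://tempo:3200

  - name: Loki
    type: loki
    access: proxy
    url: http://loki:3100
    jsonData:
      derivedFields:
        - name: TraceID
          datasourceUid: tempo
          # matches "traceId=<id>" as produced by the logback pattern
          matcherRegex: 'traceId=(\w+)'
          url: '$${__value.raw}'   # $$ escapes $ in provisioning files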

Trace != log. You can’t insert logs into trace storage (Tempo) or traces into log storage (Loki). So the behavior you’re seeing is expected.

Generate traces (not logs) from your app and ingest them into Tempo.
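
For a Java app like the one using the logback appender above, attaching the OpenTelemetry Java agent is one common option. A rough sketch of doing it with the OpenTelemetry SDK instead (requires the opentelemetry-sdk and opentelemetry-exporter-otlp dependencies; the endpoint, service name and span name here are assumptions based on the tempo OTLP gRPC port in the compose file):

import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.Tracer;
import io.opentelemetry.context.Scope;
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import io.opentelemetry.sdk.trace.export.BatchSpanProcessor;

import java.util.concurrent.TimeUnit;

public class TracingDemo {
    public static void main(String[] args) {
        // Tempo's OTLP gRPC receiver; host and port assumed from the compose file (4317).
        OtlpGrpcSpanExporter exporter = OtlpGrpcSpanExporter.builder()
                .setEndpoint("http://localhost:4317")
                .build();

        SdkTracerProvider tracerProvider = SdkTracerProvider.builder()
                .addSpanProcessor(BatchSpanProcessor.builder(exporter).build())
                .setResource(Resource.getDefault().merge(
                        Resource.create(Attributes.of(
                                AttributeKey.stringKey("service.name"), "demo-app"))))
                .build();

        OpenTelemetrySdk sdk = OpenTelemetrySdk.builder()
                .setTracerProvider(tracerProvider)
                .build();

        Tracer tracer = sdk.getTracer("demo");
        Span span = tracer.spanBuilder("demo-operation").startSpan();
        try (Scope ignored = span.makeCurrent()) {
            // ... application work; child spans created here belong to the same trace ...
        } finally {
            span.end();
        }

        // Flush pending spans before the JVM exits.
        tracerProvider.shutdown().join(10, TimeUnit.SECONDS);
    }
}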

Thanks for your quick reply, Jangaraj. We’re sending traces to Loki from our app. I’d also like to add a screenshot of the console:

This is not a trace:

This is a log line, which contains a trace ID.

So you are not sending traces to Loki. I recommend reading the docs to understand the difference between a log and a trace.

@jangaraj Could you also please let us know how we can send traces to Loki instead of logs? Is any app-level configuration required? Any guidance would be appreciated.
