Hello!
I recently encountered an issue while trying to ‘Query the archive using Loki in read-only mode.’ The archived logs were exported from Cloud Logs Exporter. The issue is characterized by an ‘empty ring’ error occurring within the Loki container.
I’m reaching out to seek your valuable insights and assistance in resolving this issue.
Here are the details of my Loki container settings:
loki-query-archive.yaml:
auth_enabled: true
server:
http_listen_port: 3100
http_server_read_timeout: 10m
http_server_write_timeout: 10m
memberlist:
join_members:
- loki:7946
querier:
query_timeout: 10m
query_store_only: true
common:
path_prefix: /loki
replication_factor: 1
compactor_address: loki:3100
storage:
s3:
access_key_id: ***
secret_access_key: ***
session_token: ***
bucketnames: ***
region: ***
schema_config:
configs:
- from: "2020-07-31"
store: tsdb
object_store: s3
schema: v12
index:
prefix: loki_prod_020_index_
period: 1d
tags: {}
chunks:
prefix: ""
period: 0s
tags: {}
row_shards: 16
- from: "2023-08-21"
store: tsdb
object_store: s3
schema: v13
index:
prefix: loki_prod_020_index_
period: 1d
tags: {}
chunks:
prefix: ""
period: 0s
tags: {}
row_shards: 16
docker-compose.query-archive.yaml:
version: "3.8"
services:
loki:
hostname: loki
image: grafana/loki:latest
command: "-config.file=/etc/loki/config.yaml -target=querier "
ports:
- "3100:3100"
- 7946
- 9095
volumes:
- ./loki-query-archive.yaml:/etc/loki/config.yaml:ro
networks:
- grafana-loki
grafana:
image: grafana/grafana:9.2.0-beta1
environment:
- GF_PATHS_PROVISIONING=/etc/grafana/provisioning
- GF_AUTH_ANONYMOUS_ENABLED=true
- GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
depends_on:
- loki
entrypoint:
- sh
- -euc
- |
mkdir -p /etc/grafana/provisioning/datasources
cat <<EOF > /etc/grafana/provisioning/datasources/ds.yaml
apiVersion: 1
datasources:
- name: Loki
type: loki
access: proxy
url: http://loki:3100
jsonData:
timeout: 600
httpHeaderName1: "X-Scope-OrgID"
secureJsonData:
httpHeaderValue1: "*****"
EOF
/run.sh
ports:
- "3000:3000"
networks:
- grafana-loki
networks:
grafana-loki: {}
Loki Container Log:
level=warn ts=2023-11-30T07:19:35.589644156Z caller=loki.go:288 msg="global timeout not configured, using default engine timeout (\"5m0s\"). This behavior will change in
s use the default global timeout (\"5m\")."
level=info ts=2023-11-30T07:19:35.592174259Z caller=main.go:108 msg="Starting Loki" version="(version=2.9.2, branch=HEAD, revision=a17308db6)"
level=info ts=2023-11-30T07:19:35.592931185Z caller=server.go:322 http=[::]:3100 grpc=[::]:9095 msg="server listening on addresses"
level=warn ts=2023-11-30T07:19:35.593812625Z caller=cache.go:127 msg="fifocache config is deprecated. use embedded-cache instead"
level=warn ts=2023-11-30T07:19:35.593835865Z caller=experimental.go:20 msg="experimental feature in use" feature="In-memory (FIFO) cache - chunksembedded-cache"
level=info ts=2023-11-30T07:19:35.594333922Z caller=table_manager.go:271 index-store=tsdb-2020-07-31 msg="query readiness setup completed" duration=1.506µs distinct_users_len=0 distinct_users=
level=info ts=2023-11-30T07:19:35.594359598Z caller=shipper.go:165 index-store=tsdb-2020-07-31 msg="starting index shipper in RO mode"
level=info ts=2023-11-30T07:19:35.59450187Z caller=table_manager.go:271 index-store=tsdb-2023-08-21 msg="query readiness setup completed" duration=797ns distinct_users_len=0 distinct_users=
level=info ts=2023-11-30T07:19:35.594510722Z caller=shipper.go:165 index-store=tsdb-2023-08-21 msg="starting index shipper in RO mode"
level=info ts=2023-11-30T07:19:35.594950951Z caller=worker.go:112 msg="Starting querier worker using query-scheduler and scheduler ring for addresses"
level=info ts=2023-11-30T07:19:35.596178527Z caller=memberlist_client.go:434 msg="Using memberlist cluster label and node name" cluster_label= node=loki-327deff6
level=info ts=2023-11-30T07:19:35.596712152Z caller=memberlist_client.go:540 msg="memberlist fast-join starting" nodes_found=1 to_join=4
level=info ts=2023-11-30T07:19:35.597163176Z caller=module_service.go:82 msg=initialising module=analytics
level=info ts=2023-11-30T07:19:35.597344217Z caller=module_service.go:82 msg=initialising module=cache-generation-loader
level=info ts=2023-11-30T07:19:35.597635741Z caller=module_service.go:82 msg=initialising module=server
level=info ts=2023-11-30T07:19:35.597876118Z caller=memberlist_client.go:560 msg="memberlist fast-join finished" joined_nodes=1 elapsed_time=1.165471ms
level=info ts=2023-11-30T07:19:35.597889537Z caller=memberlist_client.go:573 msg="joining memberlist cluster" join_members=loki:7946
level=info ts=2023-11-30T07:19:35.59796497Z caller=module_service.go:82 msg=initialising module=memberlist-kv
level=info ts=2023-11-30T07:19:35.598067645Z caller=module_service.go:82 msg=initialising module=query-scheduler-ring
level=info ts=2023-11-30T07:19:35.598193964Z caller=ring.go:273 msg="ring doesn't exist in KV store yet"
level=info ts=2023-11-30T07:19:35.598269989Z caller=module_service.go:82 msg=initialising module=ring
level=info ts=2023-11-30T07:19:35.598283276Z caller=ring.go:273 msg="ring doesn't exist in KV store yet"
level=info ts=2023-11-30T07:19:35.598303969Z caller=module_service.go:82 msg=initialising module=ingester-querier
level=info ts=2023-11-30T07:19:35.598315235Z caller=module_service.go:82 msg=initialising module=store
level=info ts=2023-11-30T07:19:35.598356154Z caller=memberlist_client.go:592 msg="joining memberlist cluster succeeded" reached_nodes=1 elapsed_time=466.466µs
level=info ts=2023-11-30T07:19:35.598385436Z caller=module_service.go:82 msg=initialising module=querier
level=info ts=2023-11-30T07:19:35.598440322Z caller=loki.go:505 msg="Loki started"
level=error ts=2023-11-30T07:19:38.599481548Z caller=ring_watcher.go:56 component=querier-scheduler-worker msg="error getting addresses from ring" err="empty ring"
level=error ts=2023-11-30T07:19:41.5987232Z caller=ring_watcher.go:56 component=querier-scheduler-worker msg="error getting addresses from ring" err="empty ring"
level=error ts=2023-11-30T07:19:44.599555289Z caller=ring_watcher.go:56 component=querier-scheduler-worker msg="error getting addresses from ring" err="empty ring"
level=error ts=2023-11-30T07:19:47.598475441Z caller=ring_watcher.go:56 component=querier-scheduler-worker msg="error getting addresses from ring" err="empty ring"
While I continue to explore potential solutions, I thought it best to connect with this fantastic community for any advice or suggestions.
Thank you all in advance for your time and assistance. Looking forward to hearing from the community.
Best regards,
Masaki Hirano