Dear community, could anyone please help me figure out what is wrong with my config:
auth_enabled: false
server:
  http_listen_port: {{ .Values.loki.containerPorts.http }}
  grpc_listen_port: {{ .Values.loki.containerPorts.grpc }}
  http_server_read_timeout: "1h"
  http_server_write_timeout: "5m"
  http_server_idle_timeout: "5m"
  grpc_server_max_recv_msg_size: 41943040
  grpc_server_max_send_msg_size: 41943040
  log_level: debug
common:
  compactor_address: http://{{ include "grafana-loki.compactor.fullname" . }}:{{ .Values.compactor.service.ports.http }}
distributor:
  ring:
    kvstore:
      store: memberlist
memberlist:
  join_members:
    - {{ include "grafana-loki.gossip-ring.fullname" . }}
ingester:
  lifecycler:
    ring:
      kvstore:
        store: memberlist
      replication_factor: 1
  chunk_idle_period: 30m
  chunk_block_size: 262144
  chunk_encoding: snappy
  chunk_retain_period: 1m
  wal:
    dir: {{ .Values.loki.dataDir }}/wal
limits_config:
  retention_period: 36h
  reject_old_samples: true
  reject_old_samples_max_age: 68h
  max_cache_freshness_per_query: 10m
  split_queries_by_interval: 30m
  max_query_parallelism: 8
  allow_structured_metadata: true
schema_config:
  configs:
    - from: 2020-10-24
      store: boltdb-shipper
      object_store: filesystem
      schema: v11
      index:
        prefix: index_
        period: 24h
    - from: 2024-03-12
      store: tsdb
      object_store: filesystem
      schema: v12
      index:
        period: 24h
        prefix: index_
    - from: 2024-04-23
      object_store: filesystem
      store: tsdb
      schema: v13
      index:
        prefix: index_
        period: 24h
storage_config:
  boltdb_shipper:
    active_index_directory: {{ .Values.loki.dataDir }}/loki/index
    cache_location: {{ .Values.loki.dataDir }}/loki/cache
    cache_ttl: 168h
    {{- if .Values.indexGateway.enabled }}
    index_gateway_client:
      server_address: {{ (printf "dns:///%s:9095" (include "grafana-loki.index-gateway.fullname" .)) }}
    {{- end }}
  filesystem:
    directory: {{ .Values.loki.dataDir }}/chunks
  index_queries_cache_config:
    {{- if .Values.memcachedindexqueries.enabled }}
    memcached:
      batch_size: 100
      parallelism: 100
    memcached_client:
      consistent_hash: true
      addresses: dns+{{ include "grafana-loki.memcached-index-queries.host" . }}
      service: http
    {{- end }}
  tsdb_shipper:
    active_index_directory: {{ .Values.loki.dataDir }}/loki/tsdb-index
    cache_location: {{ .Values.loki.dataDir }}/loki/tsdb-cache
    {{- if .Values.indexGateway.enabled }}
    index_gateway_client:
      server_address: {{ (printf "dns:///%s:9095" (include "grafana-loki.index-gateway.fullname" .)) }}
    {{- end }}
query_scheduler:
  max_outstanding_requests_per_tenant: 2048
querier:
  max_concurrent: 16
chunk_store_config:
  {{- if .Values.memcachedchunks.enabled }}
  chunk_cache_config:
    memcached:
      batch_size: 100
      parallelism: 100
    memcached_client:
      consistent_hash: true
      addresses: dns+{{ include "grafana-loki.memcached-chunks.host" . }}
  {{- end }}
  {{- if .Values.memcachedindexwrites.enabled }}
  write_dedupe_cache_config:
    memcached:
      batch_size: 100
      parallelism: 100
    memcached_client:
      consistent_hash: true
      addresses: dns+{{ include "grafana-loki.memcached-index-writes.host" . }}
  {{- end }}
table_manager:
  retention_deletes_enabled: false
  retention_period: 0s
query_range:
  parallelise_shardable_queries: false
  align_queries_with_step: true
  max_retries: 5
  cache_results: true
  results_cache:
    cache:
      {{- if .Values.memcachedfrontend.enabled }}
      memcached_client:
        consistent_hash: true
        addresses: dns+{{ include "grafana-loki.memcached-frontend.host" . }}
        max_idle_conns: 16
        timeout: 500ms
        update_interval: 1m
      {{- else }}
      embedded_cache:
        enabled: true
        max_size_items: 1024
        validity: 24h
      {{- end }}
{{- if not .Values.queryScheduler.enabled }}
frontend_worker:
  frontend_address: {{ include "grafana-loki.query-frontend.fullname" . }}:{{ .Values.queryFrontend.service.ports.grpc }}
{{- end }}
frontend:
  log_queries_longer_than: 5s
  compress_responses: true
  tail_proxy_url: http://{{ include "grafana-loki.querier.fullname" . }}:{{ .Values.querier.service.ports.http }}
compactor:
  working_directory: {{ .Values.loki.dataDir }}/loki/retention
  compaction_interval: 10m
  retention_enabled: true
  retention_delete_delay: 2h
  retention_delete_worker_count: 150
  delete_request_store: filesystem
ruler:
  storage:
    type: local
    local:
      directory: {{ .Values.loki.dataDir }}/conf/rules
  ring:
    kvstore:
      store: memberlist
  rule_path: /tmp/loki/scratch
  alertmanager_url: https://alertmanager.xx
  external_url: https://alertmanager.xx
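For context, the rendered config in the running pods does end up pointing at plain local filesystem storage under the data dir, which matches the paths in the errors below. This is roughly how I looked at it (the config path is the chart default in my install, and <querier-pod> is a placeholder; adjust both to your setup):

# print the rendered storage section from a running pod
# (config path assumes the chart's default mount; <querier-pod> is a placeholder)
kubectl exec <querier-pod> -- grep -A 3 'filesystem:' /bitnami/grafana-loki/conf/loki.yaml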
All pods start up fine, and I can run queries in Grafana.
But on some queries, like:
sum by(path) (count_over_time({app="$app", pod=~"$pod", msg=~"Request validation error.*"} [$__auto]))
I’m getting an error:
failed to load chunk 'fake/89ac1eeb4dfd38f0/MThmZWQ0YTRiMTk6MThmZWQ3NGI1MGQ6YTFkMDI3NTc=': open /bitnami/grafana-loki/chunks/fake/89ac1eeb4dfd38f0/MThmZWQ0YTRiMTk6MThmZWQ3NGI1MGQ6YTFkMDI3NTc=: no such file or directory
This happens with all range functions (rate, count_over_time, etc.).
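In case it helps with diagnosing, here is roughly how I look for the chunk from the error message on the pods (the label selector and data dir are from my release, so treat them as placeholders):

# look for the chunk directory from the error on every Loki pod
# (label selector and data dir are examples from my release; adjust as needed)
for pod in $(kubectl get pods -l app.kubernetes.io/instance=api-firewall -o name); do
  echo "== $pod"
  kubectl exec "$pod" -- ls /bitnami/grafana-loki/chunks/fake/89ac1eeb4dfd38f0/ 2>/dev/null
done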
I also see errors in the logs of the query-frontend pod:
api-firewall-grafana-loki-query-frontend-b94b698b5-62hz2 level=error ts=2024-06-06T13:59:12.782522911Z caller=retry.go:95 org_id=fake traceID=4d875331af9f1043 msg="error processing request" try=1 query="sum by (path)(count_over_time({app=\"api-firewall\", pod=~\".*\", msg=~\"Shadow API: undefined parameters found\"}[1h]))" query_hash=865803805 start=2024-06-06T13:59:11.84Z end=2024-06-06T13:59:11.84Z start_delta=942.514911ms end_delta=942.515601ms length=0s retry_in=1.511103519s err="rpc error: code = Code(500) desc = failed to load chunk 'fake/c2838354379276cd/MThmZWQ4NjU0OWY6MThmZWRhYjY3N2Y6YWIxMWYzNGM=': open //grafana-loki/chunks/fake/c2838354379276cd/MThmZWQ4NjU0OWY6MThmZWRhYjY3N2Y6YWIxMWYzNGM=: no such file or directory"
api-firewall-grafana-loki-query-frontend-b94b698b5-62hz2 level=error ts=2024-06-06T13:59:15.027658792Z caller=retry.go:95 org_id=fake traceID=4d875331af9f1043 msg="error processing request" try=2 query="sum by (path)(count_over_time({app=\"api-firewall\", pod=~\".*\", msg=~\"Shadow API: undefined parameters found\"}[1h] offset 1h0m0s))" query_hash=2834255071 start=2024-06-06T13:59:11.84Z end=2024-06-06T13:59:11.84Z start_delta=3.187652568s end_delta=3.187653082s length=0s retry_in=4.365943429s err="rpc error: code = Code(500) desc = failed to load chunk 'fake/d4f620a5d3a8b02d/MThmZWQ0N2JlMmU6MThmZWQ3MDc2NWY6NzJhYjUzNDU=': open //grafana-loki/chunks/fake/d4f620a5d3a8b02d/MThmZWQ0N2JlMmU6MThmZWQ3MDc2NWY6NzJhYjUzNDU=: no such file or directory"
api-firewall-grafana-loki-query-frontend-b94b698b5-62hz2 level=error ts=2024-06-06T13:59:15.861341747Z caller=retry.go:95 org_id=fake traceID=4d875331af9f1043 msg="error processing request" try=2 query="sum by (path)(count_over_time({app=\"api-firewall\", pod=~\".*\", msg=~\"Shadow API: undefined parameters found\"}[1h]))" query_hash=865803805 start=2024-06-06T13:59:11.84Z end=2024-06-06T13:59:11.84Z start_delta=4.02133455s end_delta=4.021335003s length=0s retry_in=4.125772618s err="rpc error: code = Code(500) desc = failed to load chunk 'fake/f31df9b987ce323b/MThmZWQ5ZWM0NTI6MThmZWRhM2NkZjY6N2UwYzYxNDg=': open //grafana-loki/chunks/fake/f31df9b987ce323b/MThmZWQ5ZWM0NTI6MThmZWRhM2NkZjY6N2UwYzYxNDg=: no such file or directory"
api-firewall-grafana-loki-query-frontend-b94b698b5-62hz2 level=error ts=2024-06-06T13:59:15.974877155Z caller=retry.go:95 org_id=fake traceID=4d875331af9f1043 msg="error processing request" try=2 query="sum by (path)(count_over_time({app=\"api-firewall\", pod=~\".*\", msg=~\"Shadow API: undefined parameters found\"}[1h] offset 2h0m0s))" query_hash=3075849878 start=2024-06-06T13:59:11.84Z end=2024-06-06T13:59:11.84Z start_delta=4.134870268s end_delta=4.134870751s length=0s retry_in=4.432957889s err="rpc error: code = Code(500) desc = failed to load chunk 'fake/1578df082a04901c/MThmZWQyYzQ2MTM6MThmZWQ5OTFkNmE6NzI3NzY5Y2E=': open //grafana-loki/chunks/fake/1578df082a04901c/MThmZWQyYzQ2MTM6MThmZWQ5OTFkNmE6NzI3NzY5Y2E=: no such file or directory"
And the logs from the gateway pod:
api-firewall-grafana-loki-gateway-5c7cbfc8b4-kwv2w 10.10.6.11 - - [06/Jun/2024:14:53:30 +0000] 500 "GET /loki/api/v1/query_range?direction=backward&end=1717685593679000000&query=max+by%28level%29+%28count_over_time%28%7Bapp%3D%22api-firewall%22%2C+pod%3D~%22.%2A%22%7D+%5B20s%5D%29%29&start=1717674780000000000&step=20000ms HTTP/1.1" 218 "-" "Grafana/10.4.1" "-"
api-firewall-grafana-loki-gateway-5c7cbfc8b4-kwv2w 10.10.6.11 - - [06/Jun/2024:14:53:30 +0000] 500 "GET /loki/api/v1/query_range?direction=backward&end=1717685593681000000&query=rate%28%7Blevel%3D%22error%22%2C+app%3D%22api-firewall%22%2C+msg%3D~%22Request+validation+error.%2A%22%7D+%5B30s%5D%29&start=1717674780000000000&step=30000ms HTTP/1.1" 218 "-" "Grafana/10.4.1" "-"
api-firewall-grafana-loki-gateway-5c7cbfc8b4-kwv2w 10.10.6.11 - - [06/Jun/2024:14:53:30 +0000] 500 "GET /loki/api/v1/query_range?direction=backward&end=1717685593680000000&query=max+by%28method%29+%28count_over_time%28%7Bpod%3D~%22.%2A%22%2C+app%3D%22api-firewall%22%2C+method%21%3D%22%22%7D+%5B30s%5D%29%29&start=1717674780000000000&step=30000ms HTTP/1.1" 218 "-" "Grafana/10.4.1" "-"
api-firewall-grafana-loki-gateway-5c7cbfc8b4-kwv2w 10.10.6.11 - - [06/Jun/2024:14:53:31 +0000] 500 "GET /loki/api/v1/query_range?direction=backward&end=1717685593681000000&query=sum+by%28msg%29+%28rate%28%7Bapp%3D%22api-firewall%22%2C+msg%21~%22%5E%28main%7Chandler%7CSending%7CReceived%29.%2A%22%7D+%5B1m%5D%29%29&start=1717674780000000000&step=60000ms HTTP/1.1" 218 "-" "Grafana/10.4.1" "-"
api-firewall-grafana-loki-gateway-5c7cbfc8b4-kwv2w 10.10.6.11 - - [06/Jun/2024:14:53:31 +0000] 500 "GET /loki/api/v1/query_range?direction=backward&end=1717685593665000000&query=sum%28count_over_time%28%7Bpod%3D~%22.%2A%22%2C+app%3D%22api-firewall%22%2C+msg%3D%22Error+while+proxying+request%22%7D+%5B2m%5D%29%29&start=1717642320000000000&step=120000ms HTTP/1.1" 218 "-" "Grafana/10.4.1" "-"
api-firewall-grafana-loki-gateway-5c7cbfc8b4-kwv2w 10.10.6.11 - - [06/Jun/2024:14:53:47 +0000] 500 "GET /loki/api/v1/query?direction=backward&limit=1000&query=sum+by%28path%29+%28count_over_time%28%7Bapp%3D%22api-firewall%22%2C+pod%3D~%22.%2A%22%2C+msg%3D~%22Shadow+API%3A+undefined+parameters+found%22%7D+%5B21600s%5D%29%29&time=1717685593682000000 HTTP/1.1" 218 "-" "Grafana/10.4.1" "-"
api-firewall-grafana-loki-gateway-5c7cbfc8b4-kwv2w 10.10.6.11 - - [06/Jun/2024:14:53:48 +0000] 500 "GET /loki/api/v1/query_range?direction=backward&end=1717685593682000000&query=rate%28%7Bapp%3D%22api-firewall%22%2C+pod%3D~%22.%2A%22%2C+msg%3D%22Shadow+API%3A+undefined+parameters+found%22%7D+%5B10m%5D%29&start=1717674780000000000&step=30000ms HTTP/1.1" 218 "-" "Grafana/10.4.1" "-"
api-firewall-grafana-loki-gateway-5c7cbfc8b4-kwv2w 10.10.6.11 - - [06/Jun/2024:14:53:48 +0000] 500 "GET /loki/api/v1/query?direction=backward&limit=1000&query=sum+by%28path%29+%28count_over_time%28%7Bapp%3D%22api-firewall%22%2C+pod%3D~%22.%2A%22%2C+msg%3D~%22Request+validation+error.%2A%22%7D+%5B21600s%5D%29%29&time=1717685593681000000 HTTP/1.1" 218 "-" "Grafana/10.4.1" "-"
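For readability, here is the second of those 500s with the URL decoded, as a curl sketch (the port-forward target is an assumption; point it at your gateway Service):

# decoded form of the rate() request above; assumes something like
# `kubectl port-forward svc/api-firewall-grafana-loki-gateway 3100:80` first
curl -G 'http://localhost:3100/loki/api/v1/query_range' \
  --data-urlencode 'query=rate({level="error", app="api-firewall", msg=~"Request validation error.*"} [30s])' \
  --data-urlencode 'start=1717674780000000000' \
  --data-urlencode 'end=1717685593681000000' \
  --data-urlencode 'step=30000ms' \
  --data-urlencode 'direction=backward'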
Thanks in advance!