After upgrading to Loki version 3.0, I'm getting an "empty ring" error:

level=error ts=2024-11-28T01:38:19.450004096Z caller=ratestore.go:109 msg="error getting ingester clients" err="empty ring"

Here is my Loki config:

# FIX: in the original paste every key after `auth_enabled` was indented by
# 16 spaces (apparently copied out of a Helm/compose values block), which is
# not valid YAML — all top-level Loki sections must start at column 0.
auth_enabled: false
analytics:
  reporting_enabled: false
server:
  http_listen_address: 127.0.0.1
  http_listen_port: 3100
  grpc_server_max_recv_msg_size: 10485760  # 10 MiB
  grpc_server_max_send_msg_size: 10485760  # 10 MiB
  log_level: error
common:
  instance_addr: 127.0.0.1
  path_prefix: /mnt/loki_storage
  storage:
    filesystem:
      chunks_directory: /mnt/loki_storage/chunks
      rules_directory: /mnt/loki_storage/rules
  replication_factor: 1
  # Single-node setup: keep the ring in memory instead of consul/etcd.
  ring:
    kvstore:
      store: inmemory
chunk_store_config:
  chunk_cache_config:
    embedded_cache:
      enabled: true
      max_size_mb: 500
      ttl: 24h
query_range:
  parallelise_shardable_queries: false
  results_cache:
    cache:
      embedded_cache:
        enabled: true
        max_size_mb: 500
frontend:
  max_outstanding_per_tenant: 4096
  # NOTE(review): `compress_responses` was removed from the frontend block in
  # Loki 3.x — confirm against the Loki 3.0 upgrade guide and drop it if the
  # config fails validation.
  compress_responses: true
compactor:
  working_directory: /mnt/loki_storage/retention
  delete_request_store: filesystem
  compaction_interval: 3m
  retention_delete_delay: 1m
  delete_request_cancel_period: 10s
  retention_enabled: true
  retention_delete_worker_count: 150
schema_config:
  configs:
    - from: 2024-01-01
      store: tsdb
      object_store: filesystem
      schema: v13
      index:
        prefix: index_
        period: 24h
limits_config:
  retention_period: 168h  # 7 days; enforced by the compactor above
  split_queries_by_interval: 0
  query_timeout: 5m
  reject_old_samples_max_age: 2w
  per_stream_rate_limit: 64MB
  per_stream_rate_limit_burst: 128MB
  ingestion_burst_size_mb: 100
  ingestion_rate_mb: 100
  max_entries_limit_per_query: 10000
  max_query_bytes_read: 268435456  # 256 MiB

I get the same error with Promtail in docker-compose.

Loki config:

auth_enabled: false

# FIX: the original target list omitted the ingester, so no ingester process
# ever registered in the ring — that is the direct cause of the "empty ring"
# errors reported by the distributor/querier (and of Promtail's HTTP 500s).
# Either include `ingester` explicitly, as below, or use `target: all`.
target: querier,distributor,ingester,query-frontend,ruler

server:
  http_listen_port: 3100

ingester:
  lifecycler:
    address: 127.0.0.1
    # Single-node setup: in-memory ring, one replica.
    ring:
      kvstore:
        store: inmemory
      replication_factor: 1
    final_sleep: 0s
  chunk_idle_period: 5m
  chunk_retain_period: 30s

schema_config:
  configs:
    - from: 2020-01-01
      # NOTE(review): the legacy `boltdb` index store with schema v11 is not
      # supported by Loki 3.x (the compose file runs grafana/loki:3.5.2) —
      # confirm against the Loki 3.0 upgrade guide; new deployments should use
      # tsdb + schema v13.
      store: boltdb
      object_store: filesystem
      schema: v11
      index:
        prefix: index_
        period: 24h

storage_config:
  boltdb:
    directory: /loki/index
  filesystem:
    directory: /loki/chunks

ruler:
  storage:
    type: local
    local:
      directory: /loki/rules
  rule_path: /loki/rules
  alertmanager_url: http://localhost:9093

limits_config:
  ingestion_rate_strategy: global
  ingestion_rate_mb: 4
  ingestion_burst_size_mb: 6
  max_concurrent_tail_requests: 10
  max_global_streams_per_user: 0  # 0 disables the per-user stream limit
  # NOTE(review): retention_period only takes effect when a compactor runs
  # with retention_enabled: true — no compactor is configured (or targeted)
  # here; verify retention is actually being applied.
  retention_period: 168h

Promtail config:

server:
  http_listen_port: 9080
  grpc_listen_port: 0  # 0 = bind gRPC to a random free port

positions:
  # File where Promtail records how far it has read in each tailed source.
  filename: /tmp/positions.yaml

clients:
  # Push endpoint of the Loki container (compose service name "loki").
  - url: http://loki:3100/loki/api/v1/push

scrape_configs:
  # Job 1: discover running containers through the mounted Docker socket.
  - job_name: containers
    docker_sd_configs:
      - host: unix:///var/run/docker.sock
        refresh_interval: 5s
    relabel_configs:
      # Strip the leading "/" Docker puts in front of container names.
      - source_labels: ['__meta_docker_container_name']
        regex: '/(.*)'
        target_label: 'container'
      - source_labels: ['__meta_docker_container_log_stream']
        target_label: 'logstream'
      # Compose service name (from the com.docker.compose.service label).
      - source_labels: ['__meta_docker_container_label_com_docker_compose_service']
        target_label: 'service'
    pipeline_stages:
      # Extract `level` and `message` from JSON-formatted log lines.
      - json:
          expressions:
            level: level
            message: message
      # Use the `time` field from the JSON as the entry timestamp.
      - timestamp:
          source: time
          format: RFC3339Nano
      # Ship only the extracted message as the log line.
      - output:
          source: message

  # Job 2: tail plain log files under /var/log (mounted from the host).
  - job_name: services
    static_configs:
      - targets:
          - localhost
        labels:
          job: containerlogs
          __path__: /var/log/*log
    pipeline_stages:
      # NOTE(review): these stages assume the files contain Docker-style JSON
      # ({log, stream, time}); plain-text /var/log files will not match —
      # confirm which files this glob actually picks up.
      - json:
          expressions:
            output: log
            stream: stream
            timestamp: time
      - timestamp:
          source: timestamp
          format: RFC3339Nano
      - output:
          source: output

Relevant part of the docker-compose file:

  loki:
    image: grafana/loki:3.5.2
    container_name: flylady-loki
    ports:
      - "3100:3100"  # quoted — avoids YAML's sexagesimal parsing of port pairs
    # The structured-metadata flag is disabled, presumably because the mounted
    # config uses a pre-v13 schema — TODO confirm against local-config.yaml.
    command: -config.file=/etc/loki/local-config.yaml -validation.allow-structured-metadata=false
    volumes:
      - loki_data:/loki
      - ./monitoring/local-config.yaml:/etc/loki/local-config.yaml
    networks:
      - flylady-network

  promtail:
    # NOTE(review): `latest` is an unpinned tag — consider pinning a version
    # so Promtail and Loki stay in step across pulls.
    image: grafana/promtail:latest
    container_name: flylady-promtail
    command: -config.file=/etc/promtail/config.yml
    volumes:
      - ./monitoring/promtail-config.yml:/etc/promtail/config.yml
      - /var/log:/var/log
      # Container log files read-only; socket needed for docker_sd discovery.
      - /var/lib/docker/containers:/var/lib/docker/containers:ro
      - /var/run/docker.sock:/var/run/docker.sock
    networks:
      - flylady-network
    depends_on:
      - loki

The Promtail log contains:

level=warn ts=2025-10-23T12:25:55.809064894Z caller=client.go:419 component=client host=loki:3100 msg="error sending batch, will retry" status=500 tenant= error="server returned HTTP status 500 Internal Server Error (500): empty ring"

The Loki log contains:

level=error ts=2025-10-23T12:33:26.482752503Z caller=ratestore.go:109 msg="error getting ingester clients" err="empty ring"