Skip to content
30 changes: 17 additions & 13 deletions lib/logflare/context_cache/supervisor.ex
Original file line number Diff line number Diff line change
Expand Up @@ -57,23 +57,27 @@ defmodule Logflare.ContextCache.Supervisor do
]
end

@doc """
Lists every context cache module paired with the atom used as its
Cachex telemetry metric name, e.g. `{Users.Cache, :users}`.

The metric atoms are spelled out literally so that they exist in the
source code instead of being derived from module names at runtime.
"""
def list_caches_with_metrics do
  [
    {TeamUsers.Cache, :team_users},
    {Partners.Cache, :partners},
    {Users.Cache, :users},
    {Backends.Cache, :backends},
    {Sources.Cache, :sources},
    {Billing.Cache, :billing},
    {SourceSchemas.Cache, :source_schemas},
    {Auth.Cache, :auth},
    {Endpoints.Cache, :endpoints},
    {Rules.Cache, :rules},
    {KeyValues.Cache, :key_values},
    {SavedSearches.Cache, :saved_searches}
  ]
end

@doc """
Lists every context cache module, derived from `list_caches_with_metrics/0`
so the two lists can never drift apart.
"""
def list_caches do
  Enum.map(list_caches_with_metrics(), fn {cache, _metric} -> cache end)
end

@doc """
Returns the publisher :via name used for syn registry.
"""
Expand Down
74 changes: 0 additions & 74 deletions lib/logflare/system_metrics/cachex/poller.ex

This file was deleted.

1 change: 0 additions & 1 deletion lib/logflare/system_metrics/system_metrics_sup.ex
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ defmodule Logflare.SystemMetricsSup do
children = [
SystemMetrics.AllLogsLogged,
SystemMetrics.AllLogsLogged.Poller,
SystemMetrics.Cachex.Poller,
# telemetry poller
{
:telemetry_poller,
Expand Down
32 changes: 19 additions & 13 deletions lib/telemetry.ex
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,13 @@ defmodule Logflare.Telemetry do

def start_link(arg), do: Supervisor.start_link(__MODULE__, arg, name: __MODULE__)

# Compile-time snapshot of the context caches and their metric names: this
# call runs while the module compiles, so Logflare.ContextCache.Supervisor
# must be compilable first. NOTE(review): changes to that list require
# recompiling this module to take effect — confirm that is intended.
context_caches_with_metrics = Logflare.ContextCache.Supervisor.list_caches_with_metrics()

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This evaluates to

[
  {Logflare.TeamUsers.Cache, :team_users},
  {Logflare.Partners.Cache, :partners},
  {Logflare.Users.Cache, :users},
  {Logflare.Backends.Cache, :backends},
  {Logflare.Sources.Cache, :sources},
  {Logflare.Billing.Cache, :billing},
  {Logflare.SourceSchemas.Cache, :source_schemas},
  {Logflare.Auth.Cache, :auth},
  {Logflare.Endpoints.Cache, :endpoints},
  {Logflare.Rules.Cache, :rules},
  {Logflare.SavedSearches.Cache, :saved_searches}
]

or should it be hardcoded?

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can you inverse the logic so that we can do list_contexts_to_cache() and in list_caches it will call list_contexts_to_cache/0 and concat the .Cache

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

the module.split isn't very pretty 😆

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

besides that, its fine with keeping it dynamic.

Copy link
Copy Markdown
Contributor Author

@ruslandoga ruslandoga Feb 24, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I went into a slightly different direction and added

  def list_caches_with_metrics do
    [
      {TeamUsers.Cache, :team_users},
      {Partners.Cache, :partners},
      {Users.Cache, :users},
      {Backends.Cache, :backends},
      {Sources.Cache, :sources},
      {Billing.Cache, :billing},
      {SourceSchemas.Cache, :source_schemas},
      {Auth.Cache, :auth},
      {Endpoints.Cache, :endpoints},
      {Rules.Cache, :rules},
      {KeyValues.Cache, :key_values},
      {SavedSearches.Cache, :saved_searches}
    ]
  end

I think it's a bit safer for atoms to exist in the source code.

# Caches polled for Cachex stats: the non-context caches are listed here
# explicitly; context caches (and their metric names) come from the
# compile-time `context_caches_with_metrics` binding above.
@caches [
          {Logflare.Logs.LogEvents.Cache, :log_events},
          {Logflare.Logs.RejectedLogEvents, :rejected_log_events},
          {Logflare.PubSubRates.Cache, :pub_sub_rates}
        ] ++ context_caches_with_metrics

@process_metrics %{
memory: %{
Expand Down Expand Up @@ -76,6 +74,10 @@ defmodule Logflare.Telemetry do
last_value("cachex.#{metric}.evictions"),
last_value("cachex.#{metric}.expirations"),
last_value("cachex.#{metric}.operations"),
last_value("cachex.#{metric}.hits"),
last_value("cachex.#{metric}.misses"),
last_value("cachex.#{metric}.hit_rate"),
last_value("cachex.#{metric}.miss_rate"),
last_value("cachex.#{metric}.total_heap_size", unit: {:byte, :megabyte})
]
end)
Expand Down Expand Up @@ -264,11 +266,15 @@ defmodule Logflare.Telemetry do
|> Process.info(:total_heap_size)

# Default every missing Cachex stat to 0 so telemetry always receives
# numeric measurements, even before the cache has recorded any activity.
metrics = %{
  purge: Map.get(stats, :purge, 0),
  stats: Map.get(stats, :stats, 0),
  evictions: Map.get(stats, :evictions, 0),
  expirations: Map.get(stats, :expirations, 0),
  operations: Map.get(stats, :operations, 0),
  hits: Map.get(stats, :hits, 0),
  misses: Map.get(stats, :misses, 0),
  hit_rate: Map.get(stats, :hit_rate, 0),
  miss_rate: Map.get(stats, :miss_rate, 0),
  total_heap_size: total_heap_size
}

Expand Down
55 changes: 55 additions & 0 deletions test/logflare/telemetry_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -157,4 +157,59 @@ defmodule Logflare.TelemetryTest do
assert_receive {^event, ^ref, %{utilization: _}, %{name: ^id, type: "dirty (io)"}}
end
end

describe "cachex_metrics/0" do
  test "retrieves and emits stats for caches with all metrics" do
    # One `[:cachex, name]` telemetry event is expected per tracked cache.
    cache_names = [
      :log_events,
      :rejected_log_events,
      :pub_sub_rates,
      :team_users,
      :partners,
      :users,
      :backends,
      :sources,
      :billing,
      :source_schemas,
      :auth,
      :endpoints,
      :rules,
      :key_values,
      :saved_searches
    ]

    events = for name <- cache_names, do: [:cachex, name]
    ref = :telemetry_test.attach_event_handlers(self(), events)
    on_exit(fn -> :telemetry.detach(ref) end)

    # simulates telemetry_poller tick
    Telemetry.cachex_metrics()

    # Every event must carry exactly this measurement set.
    expected_keys = [
      :evictions,
      :expirations,
      :hit_rate,
      :hits,
      :miss_rate,
      :misses,
      :operations,
      :purge,
      :stats,
      :total_heap_size
    ]

    Enum.each(events, fn event ->
      assert_receive {^event, ^ref, measurements, metadata}

      assert measurements |> Map.keys() |> Enum.sort() == expected_keys

      Enum.each(measurements, fn {key, value} ->
        assert is_number(value),
               "expected #{key} for #{inspect(event)} to be numeric, got: #{inspect(value)}"
      end)

      assert metadata == %{}
    end)

    refute_received _anything_else
  end
end
end
Loading