)"` when the id is set but the association is
+ `nil` after preload (method deleted after the request ran), and the raw
+ auth type / `"None"` when no client auth was configured for the request.
+ """
+ # Clause order matters: a nil auth-method id means no client auth was
+ # recorded, so only the raw type is formatted; a preloaded method with a
+ # name yields "name (type)"; everything else falls through to "(deleted)".
+ # NOTE(review): a non-preloaded association (%Ecto.Association.NotLoaded{})
+ # has no :name field and would also hit the "(deleted)" clause — callers
+ # must preload client_webhook_auth_method; confirm this invariant holds.
+ def format_client_auth(%{client_webhook_auth_method_id: nil} = channel_request) do
+ format_auth_type(channel_request.client_auth_type)
+ end
+
+ def format_client_auth(
+ %{client_webhook_auth_method: %{name: name}} = channel_request
+ ) do
+ "#{name} (#{format_auth_type(channel_request.client_auth_type)})"
+ end
+
+ def format_client_auth(channel_request) do
+ "(deleted) (#{format_auth_type(channel_request.client_auth_type)})"
+ end
+
+ @doc """
+ Formats the destination credential used for a channel request for display.
+
+ Returns the credential name when present, `"(deleted)"` when the id is
+ still set but the credential has been deleted, and `"None"` when no
+ destination credential was configured.
+ """
+ # Clause order: nil id first (never configured), then the fully preloaded
+ # credential chain, then the deleted/fallback case.
+ def format_destination_auth(%{destination_credential_id: nil}), do: "None"
+
+ def format_destination_auth(%{
+ destination_credential: %{credential: %{name: name}}
+ }),
+ do: name
+
+ def format_destination_auth(_channel_request), do: "(deleted)"
+
+ # Human-readable byte size: em dash for nil, then B / KB / MB with one
+ # decimal place. Binary units: 1 KB = 1024 B, 1 MB = 1_048_576 B.
+ def format_bytes(nil), do: "—"
+
+ def format_bytes(bytes) when bytes < 1024,
+ do: "#{bytes} B"
+
+ def format_bytes(bytes) when bytes < 1_048_576,
+ do: "#{Float.round(bytes / 1024, 1)} KB"
+
+ def format_bytes(bytes),
+ do: "#{Float.round(bytes / 1_048_576, 1)} MB"
+
+ # Formats a microsecond value as milliseconds WITHOUT a unit suffix —
+ # callers append "ms" themselves. Whole-millisecond values drop the
+ # trailing ".0"; otherwise one decimal place is kept. nil renders as "—".
+ def format_us(nil), do: "—"
+
+ def format_us(us) when is_number(us) do
+ ms = us / 1000
+
+ if ms == Float.round(ms),
+ do: trunc(ms) |> to_string(),
+ else: Float.round(ms, 1) |> to_string()
+ end
+
+ # --- Content type utilities ---
+
+ # Finds the Content-Type value in a header list stored as
+ # [[name, value], ...] pairs; returns nil when headers are nil or the
+ # header is absent. Header-name matching is case-insensitive.
+ def extract_content_type(nil), do: nil
+
+ def extract_content_type(headers) do
+ headers
+ |> Enum.find(fn [name, _] -> String.downcase(name) == "content-type" end)
+ |> case do
+ [_, value] -> value
+ nil -> nil
+ end
+ end
+
+ # True when the content type looks textual (text/*, JSON, XML, JS, HTML).
+ # NOTE(review): there is no clause for nil, yet extract_content_type/1 can
+ # return nil — String.contains?(nil, _) raises. Callers must guard, or a
+ # `def text_content_type?(nil), do: false` clause should be added.
+ def text_content_type?(ct) do
+ String.contains?(ct, "text/") or
+ String.contains?(ct, "json") or
+ String.contains?(ct, "xml") or
+ String.contains?(ct, "javascript") or
+ String.contains?(ct, "html")
+ end
+
+ # Short display label for a content type (JSON/XML/HTML/TEXT), falling
+ # back to the raw content-type string; nil for non-binary input.
+ # Branch order matters: json/xml/html are checked before the broad "text/".
+ def format_content_type_label(ct) when is_binary(ct) do
+ cond do
+ String.contains?(ct, "json") -> "JSON"
+ String.contains?(ct, "xml") -> "XML"
+ String.contains?(ct, "html") -> "HTML"
+ String.contains?(ct, "text/") -> "TEXT"
+ true -> ct
+ end
+ end
+
+ def format_content_type_label(_), do: nil
+end
diff --git a/lib/lightning_web/live/channel_request_live/show.ex b/lib/lightning_web/live/channel_request_live/show.ex
new file mode 100644
index 00000000000..a5afbcde29c
--- /dev/null
+++ b/lib/lightning_web/live/channel_request_live/show.ex
@@ -0,0 +1,399 @@
+defmodule LightningWeb.ChannelRequestLive.Show do
+ use LightningWeb, :live_view
+
+ import LightningWeb.ChannelRequestLive.Components
+
+ import LightningWeb.ChannelRequestLive.Timing,
+ only: [timing_section: 1]
+
+ alias Lightning.Channels
+ alias LightningWeb.ChannelRequestLive.Helpers
+
+ on_mount {LightningWeb.Hooks, :project_scope}
+
+ @impl true
+ # Loads the channel request scoped to the current project. Redirects to
+ # the project history page when the experimental-features flag is off for
+ # the user, or when the request does not exist within this project.
+ def mount(%{"id" => id}, _session, socket) do
+ %{current_user: current_user, project: project} = socket.assigns
+
+ if Lightning.Accounts.experimental_features_enabled?(current_user) do
+ case Channels.get_channel_request_for_project(project.id, id) do
+ nil ->
+ {:ok, redirect(socket, to: ~p"/projects/#{project}/history")}
+
+ channel_request ->
+ {:ok,
+ assign(socket,
+ active_menu_item: :runs,
+ page_title: "Channel Request",
+ request_id: id,
+ channel_request: channel_request
+ )}
+ end
+ else
+ {:ok, redirect(socket, to: ~p"/projects/#{project}/history")}
+ end
+ end
+
+ @impl true
+ # Page layout: breadcrumb header, summary card, timing section (hidden
+ # for credential errors), request/response sections, and a context
+ # section built from the channel snapshot.
+ def render(assigns) do
+ ~H"""
+
+ <:header>
+
+ <:breadcrumbs>
+
+
+
+
+ <:label>
+ Channel Request
+
+ {display_short_uuid(@request_id)}
+
+
+
+
+
+
+
+
+
+ <% cr = @channel_request %>
+ <% event = Helpers.primary_event(cr) %>
+ <% error_cat =
+ event && event.error_message && Helpers.error_category(event.error_message) %>
+
+ <.summary_card
+ channel_request={cr}
+ event={event}
+ channel={cr.channel}
+ error_category={error_cat}
+ />
+
+ <.timing_section :if={error_cat != :credential} event={event} />
+
+
+ <.request_section event={event} />
+ <.response_section event={event} error_category={error_cat} />
+
+
+ <.context_section
+ channel_request={cr}
+ snapshot={cr.channel_snapshot}
+ channel={cr.channel}
+ />
+
+
+
+ """
+ end
+
+ # --- Summary Card ---
+
+ # Summary card: method/path/status/state header row followed by Client,
+ # Destination and Timing detail groups. @event may be nil (guarded with
+ # && throughout).
+ defp summary_card(assigns) do
+ ~H"""
+
+
+ <.method_badge method={@event && @event.request_method} />
+ <.request_path_display event={@event} />
+ <.status_code_display status={@event && @event.response_status} />
+ <.state_pill_with_tooltip
+ state={@channel_request.state}
+ error_message={@event && @event.error_message}
+ />
+
+
+
+ Channel
+ <.link
+ navigate={
+ ~p"/projects/#{@channel.project_id}/channels/#{@channel.id}/edit"
+ }
+ class="text-primary-600 hover:text-primary-800"
+ >
+ {@channel.name}
+
+
+
+ Request
+
+ {String.slice(@channel_request.id, 0..7)}
+
+ <.copy_icon_button
+ id="copy-request-id"
+ value={@channel_request.id}
+ title="Copy request ID"
+ size={3}
+ />
+
+
+
+
+
+
+
+ Client
+
+
+
+
-
+ IP
+
+ -
+ {@channel_request.client_identity || "—"}
+
+
+
+
-
+ Auth
+
+ -
+ <.icon
+ name="hero-shield-check"
+ class="h-4 w-4 shrink-0 text-secondary-400 mt-0.5"
+ />
+ {Helpers.format_client_auth(@channel_request)}
+
+
+
+
+
+
+
+ Destination
+
+
+
+
-
+ URL
+
+ -
+ {@channel.destination_url}
+
+
+
+
-
+ Auth
+
+ -
+ <.icon
+ name="hero-key"
+ class="h-4 w-4 shrink-0 text-secondary-400 mt-0.5"
+ />
+ {Helpers.format_destination_auth(@channel_request)}
+
+
+
+
+
+
+
+ Timing
+
+
+
+
-
+ Started
+
+ -
+
+
+
+
+
-
+ Completed
+
+ -
+
+
+
+
+
-
+ Latency
+
+ -
+ {if @event && @event.latency_us,
+ do: "#{Helpers.format_us(@event.latency_us)} ms",
+ else: "—"}
+
+
+
+
+
+
+ """
+ end
+
+ # --- Request Section ---
+
+ # Request disclosure: headers table plus body viewer. The body
+ # sub-section renders only when either a preview or a recorded size
+ # exists on the event.
+ defp request_section(assigns) do
+ event = assigns.event
+
+ # show_body is computed outside ~H so the template only reads assigns.
+ show_body =
+ event &&
+ not (is_nil(event.request_body_preview) and
+ is_nil(event.request_body_size))
+
+ assigns = assign(assigns, show_body: show_body)
+
+ ~H"""
+ <.disclosure_section
+ id="request-section"
+ title="Request"
+ open={true}
+ padded={false}
+ >
+ <:title_right>
+ <.section_size_badge
+ :if={@event && @event.request_body_size && @event.request_body_size > 0}
+ size={@event.request_body_size}
+ id="request-size-badge"
+ />
+
+ <%= if @event do %>
+ <.sub_section
+ :if={@event.request_headers}
+ id="req-headers"
+ title="Headers"
+ open={true}
+ >
+ <.headers_table headers={@event.request_headers} id="request-headers" />
+
+ <.sub_section :if={@show_body} id="req-body" title="Body" open={true}>
+ <:title_right>
+ 0}
+ class="text-[11px] text-secondary-400 font-mono"
+ >
+ {Helpers.format_bytes(@event.request_body_size)}
+
+
+ <.body_viewer
+ id="request-body"
+ body_preview={@event.request_body_preview}
+ body_hash={@event.request_body_hash}
+ body_size={@event.request_body_size}
+ headers={@event.request_headers}
+ />
+
+ <% end %>
+
+ """
+ end
+
+ # --- Response Section ---
+
+ # Response disclosure. Transport/credential errors show an empty-state
+ # with a humanized error instead of headers/body; the body sub-section
+ # only renders when there is no error category and a preview or size
+ # exists.
+ defp response_section(assigns) do
+ event = assigns.event
+
+ show_body =
+ event && is_nil(assigns.error_category) &&
+ not (is_nil(event.response_body_preview) and
+ is_nil(event.response_body_size))
+
+ assigns = assign(assigns, show_body: show_body)
+
+ ~H"""
+ <.disclosure_section
+ id="response-section"
+ title="Response"
+ open={true}
+ padded={false}
+ >
+ <:title_right>
+ <.status_code_badge
+ :if={@event && @event.response_status}
+ status={@event.response_status}
+ />
+ <.section_size_badge
+ :if={@event && @event.response_body_size && @event.response_body_size > 0}
+ size={@event.response_body_size}
+ id="response-size-badge"
+ />
+
+ <%= if @error_category in [:transport, :credential] do %>
+ <.response_empty
+ type={@error_category}
+ error_code={@event.error_message}
+ human_message={Helpers.humanize_error(@event.error_message)}
+ />
+ <% else %>
+ <%= if @event do %>
+ <.sub_section
+ :if={@event.response_headers}
+ id="resp-headers"
+ title="Headers"
+ open={true}
+ >
+ <.headers_table headers={@event.response_headers} id="response-headers" />
+
+ <.sub_section :if={@show_body} id="resp-body" title="Body" open={true}>
+ <:title_right>
+ 0}
+ class="text-[11px] text-secondary-400 font-mono"
+ >
+ {Helpers.format_bytes(@event.response_body_size)}
+
+
+ <.body_viewer
+ id="response-body"
+ body_preview={@event.response_body_preview}
+ body_hash={@event.response_body_hash}
+ body_size={@event.response_body_size}
+ headers={@event.response_headers}
+ />
+
+ <% end %>
+ <% end %>
+
+ """
+ end
+
+ # --- Context Section ---
+
+ # Context disclosure (collapsed by default): snapshot values captured at
+ # request time. config_changed flags lock_version drift between the
+ # snapshot and the live channel so stale configuration is visible.
+ defp context_section(assigns) do
+ config_changed =
+ assigns.snapshot.lock_version != assigns.channel.lock_version
+
+ assigns = assign(assigns, config_changed: config_changed)
+
+ ~H"""
+ <.disclosure_section id="context-section" title="Context" open={false}>
+
+
+
+ Destination URL
+
+
+ {@snapshot.destination_url}
+
+
+
+
+ Channel Name
+
+ {@snapshot.name}
+
+
+
+ Config Version
+
+
+ {@snapshot.lock_version}
+
+ Config changed
+
+
+
+
+
+ """
+ end
+end
diff --git a/lib/lightning_web/live/channel_request_live/timing.ex b/lib/lightning_web/live/channel_request_live/timing.ex
new file mode 100644
index 00000000000..63efeee020a
--- /dev/null
+++ b/lib/lightning_web/live/channel_request_live/timing.ex
@@ -0,0 +1,396 @@
+defmodule LightningWeb.ChannelRequestLive.Timing do
+ @moduledoc """
+ Timing visualization components for the channel request detail page.
+
+ Renders a segmented timing bar with TTFB marker and legend,
+ computing phase breakdowns from Finch timing metrics.
+ """
+
+ use LightningWeb, :component
+
+ import LightningWeb.ChannelRequestLive.Components,
+ only: [disclosure_section: 1]
+
+ alias LightningWeb.ChannelRequestLive.Helpers
+
+ # --- Public components ---
+
+ # Renders the "Timing" disclosure. Segment data is derived from the
+ # event's metrics; when there is no event, timing_data is nil and is
+ # passed through to the bar/legend components as-is.
+ def timing_section(assigns) do
+ event = assigns.event
+
+ timing_data =
+ if event do
+ compute_timing_segments(event)
+ else
+ nil
+ end
+
+ assigns = assign(assigns, timing_data: timing_data, event: event)
+
+ ~H"""
+
+ <.disclosure_section id="timing-section-disclosure" title="Timing" open={true}>
+
+ <.timing_bar timing_data={@timing_data} />
+ <.timing_legend timing_data={@timing_data} />
+
+
+
+ """
+ end
+
+ # --- Timing bar ---
+
+ # Hatched CSS gradient used as the "proxy overhead" background; built
+ # once at compile time into a single binary.
+ @hatch_gradient_style IO.iodata_to_binary([
+ "background: repeating-linear-gradient(",
+ "-45deg, ",
+ "rgba(156, 163, 175, 0.18) 0px, ",
+ "rgba(156, 163, 175, 0.18) 3px, ",
+ "rgba(209, 213, 219, 0.55) 3px, ",
+ "rgba(209, 213, 219, 0.55) 6px)"
+ ])
+
+ # Segmented timing bar. Segment widths are percentages of the inner
+ # (phase) total; the TTFB marker is positioned on the same scale.
+ defp timing_bar(assigns) do
+ segments = assigns.timing_data.segments
+ total_us = assigns.timing_data.total_us
+ ttfb_us = assigns.timing_data.ttfb_us
+
+ inner_total =
+ Enum.reduce(segments, 0, fn s, acc -> acc + s.us end)
+
+ # Avoid division by zero when every phase is 0 us.
+ inner_total = if inner_total == 0, do: 1, else: inner_total
+
+ # Each segment gets at least 0.5% width so tiny phases stay visible.
+ segments_with_pct =
+ Enum.map(segments, fn s ->
+ Map.put(
+ s,
+ :pct,
+ max(Float.round(s.us / inner_total * 100, 1), 0.5)
+ )
+ end)
+
+ ttfb_pct =
+ if ttfb_us && ttfb_us > 0 && inner_total > 0 do
+ Float.round(ttfb_us / inner_total * 100, 1)
+ else
+ nil
+ end
+
+ tier = assigns.timing_data.tier
+ # Overhead hatching is only meaningful with full Finch phase data.
+ show_overhead = tier == :full
+ seg_count = length(segments_with_pct)
+
+ assigns =
+ assign(assigns,
+ segments: segments_with_pct,
+ seg_count: seg_count,
+ total_us: total_us,
+ ttfb_us: ttfb_us,
+ ttfb_pct: ttfb_pct,
+ show_overhead: show_overhead,
+ hatch_style: @hatch_gradient_style
+ )
+
+ ~H"""
+
+
+ <%!-- Outer bar: hatch background with inner segments on top --%>
+
+ <%!-- Inner phase segments --%>
+
+
+
+ {seg.badge}
+
+
+ {format_segment_label(seg)}
+
+
+
+ <%!-- TTFB marker line --%>
+
+
+
+
+
+
+ <.icon name="hero-arrow-up-mini" class="h-3 w-3 text-secondary-500" />
+
+ TTFB: {Helpers.format_us(@ttfb_us)} ms
+
+
+
+
+
+
+ 0 ms
+
+ {Helpers.format_us(@total_us)} ms
+
+
+
+ """
+ end
+
+ # Label drawn inside a bar segment: empty for badge-carrying segments
+ # (the badge is rendered instead) and zero-duration segments; seconds
+ # with one decimal at >= 1000 ms, otherwise milliseconds.
+ defp format_segment_label(%{us: us} = seg) do
+ ms = us / 1000
+
+ cond do
+ Map.has_key?(seg, :badge) -> ""
+ us == 0 -> ""
+ ms >= 1000 -> "#{Float.round(ms / 1000, 1)}s"
+ true -> "#{Helpers.format_us(us)}ms"
+ end
+ end
+
+ # --- Timing legend ---
+
+ # Legend under the timing bar: one entry per segment, plus the hatched
+ # "Proxy overhead" swatch (full tier only) and the TTFB readout.
+ defp timing_legend(assigns) do
+ timing_data = assigns.timing_data
+ segments = timing_data.segments
+ ttfb_us = timing_data.ttfb_us
+
+ show_overhead = timing_data.tier == :full
+
+ assigns =
+ assign(assigns,
+ segments: segments,
+ ttfb_us: ttfb_us,
+ show_overhead: show_overhead,
+ swatch_style: @hatch_gradient_style
+ )
+
+ ~H"""
+
+
+
+
+ {seg.label}
+
+
+
+
+ Proxy overhead
+
+
+ TTFB: {Helpers.format_us(@ttfb_us)} ms
+
+
+ """
+ end
+
+ # --- Timing computation ---
+
+ # Picks the richest timing tier the event supports: nil when there is no
+ # latency at all, :full when every Finch phase metric is present,
+ # :partial with only TTFB, :minimal with latency only.
+ defp compute_timing_segments(event) do
+ cond do
+ is_nil(event.latency_us) ->
+ nil
+
+ has_finch_phases?(event) ->
+ compute_full_timing(event)
+
+ not is_nil(event.ttfb_us) ->
+ compute_ttfb_timing(event)
+
+ true ->
+ compute_minimal_timing(event)
+ end
+ end
+
+ # Full-tier timing needs send, TTFB and receive durations; queue_us and
+ # connect_us are optional (defaulted to 0 in compute_full_timing/1).
+ defp has_finch_phases?(event) do
+ not is_nil(event.request_send_us) and not is_nil(event.ttfb_us) and
+ not is_nil(event.response_duration_us)
+ end
+
+ # Builds the full phase breakdown: Queue / Connect / Send / Processing /
+ # Recv, where Processing (wait) is TTFB minus the client-side phases,
+ # floored at 0 to absorb clock skew. The gap between the phase sum and
+ # total latency is split evenly as left/right "overhead".
+ defp compute_full_timing(event) do
+ queue_us = event.queue_us || 0
+ connect_us = event.connect_us || 0
+ send_us = event.request_send_us
+ recv_us = event.response_duration_us
+ ttfb_us = event.ttfb_us
+ latency_us = event.latency_us
+
+ wait_us = max(ttfb_us - queue_us - connect_us - send_us, 0)
+
+ inner_sum = queue_us + connect_us + send_us + wait_us + recv_us
+
+ {overhead_left_pct, overhead_right_pct} =
+ compute_overhead(inner_sum, latency_us)
+
+ # Reused connections render a zero-width "(reused)" Connect badge.
+ # NOTE(review): the is_nil(event.connect_us) disjunct is redundant —
+ # connect_us above already defaults nil to 0 — but harmless.
+ reused =
+ event.reused_connection == true and
+ (connect_us == 0 or is_nil(event.connect_us))
+
+ segments =
+ []
+ |> maybe_add_segment(queue_us > 0, %{
+ label: "Queue",
+ us: queue_us,
+ color: "bg-amber-300",
+ text_color: "text-amber-900"
+ })
+ |> maybe_add_connect_segment(connect_us, reused)
+ |> Kernel.++([
+ %{
+ label: "Send",
+ us: send_us,
+ color: "bg-blue-400",
+ text_color: "text-blue-900"
+ },
+ %{
+ label: "Processing",
+ us: wait_us,
+ color: "bg-gray-300",
+ text_color: "text-gray-700"
+ },
+ %{
+ label: "Recv",
+ us: recv_us,
+ color: "bg-green-400",
+ text_color: "text-green-900"
+ }
+ ])
+
+ %{
+ segments: segments,
+ total_us: latency_us,
+ ttfb_us: ttfb_us,
+ overhead_left_pct: overhead_left_pct,
+ overhead_right_pct: overhead_right_pct,
+ tier: :full
+ }
+ end
+
+ # Partial tier: only TTFB is known, so the bar is split into TTFB and
+ # Download (latency minus TTFB, floored at 0).
+ defp compute_ttfb_timing(event) do
+ download_us = max(event.latency_us - event.ttfb_us, 0)
+
+ segments = [
+ %{
+ label: "TTFB",
+ us: event.ttfb_us,
+ color: "bg-blue-400",
+ text_color: "text-blue-900"
+ },
+ %{
+ label: "Download",
+ us: download_us,
+ color: "bg-green-400",
+ text_color: "text-green-900"
+ }
+ ]
+
+ %{
+ segments: segments,
+ total_us: event.latency_us,
+ ttfb_us: event.ttfb_us,
+ overhead_left_pct: 0,
+ overhead_right_pct: 0,
+ tier: :partial
+ }
+ end
+
+ # Minimal tier: total latency only, rendered as one "Total" segment.
+ defp compute_minimal_timing(event) do
+ segments = [
+ %{
+ label: "Total",
+ us: event.latency_us,
+ color: "bg-blue-400",
+ text_color: "text-blue-900"
+ }
+ ]
+
+ %{
+ segments: segments,
+ total_us: event.latency_us,
+ ttfb_us: nil,
+ overhead_left_pct: 0,
+ overhead_right_pct: 0,
+ tier: :minimal
+ }
+ end
+
+ # Percentage of total latency not accounted for by the summed phases,
+ # split evenly into left/right halves of the bar. Zero when the phases
+ # cover (or exceed) the latency, or when latency is 0 (guards division).
+ defp compute_overhead(inner_sum, latency_us)
+ when inner_sum >= latency_us or latency_us == 0 do
+ {0, 0}
+ end
+
+ defp compute_overhead(inner_sum, latency_us) do
+ gap_pct = (latency_us - inner_sum) / latency_us * 100
+ half = Float.round(gap_pct / 2, 1)
+ {half, half}
+ end
+
+ # Appends the segment only when the condition holds.
+ defp maybe_add_segment(segments, true, segment),
+ do: segments ++ [segment]
+
+ defp maybe_add_segment(segments, false, _segment), do: segments
+
+ # Connect segment: a zero-duration "(reused)" badge when the connection
+ # was reused, a real segment when connect time was measured, nothing
+ # otherwise.
+ defp maybe_add_connect_segment(segments, _connect_us, true) do
+ segments ++
+ [
+ %{
+ label: "Connect",
+ us: 0,
+ color: "bg-orange-400",
+ text_color: "text-orange-900",
+ badge: "(reused)"
+ }
+ ]
+ end
+
+ defp maybe_add_connect_segment(segments, connect_us, false)
+ when connect_us > 0 do
+ segments ++
+ [
+ %{
+ label: "Connect",
+ us: connect_us,
+ color: "bg-orange-400",
+ text_color: "text-orange-900"
+ }
+ ]
+ end
+
+ defp maybe_add_connect_segment(segments, _connect_us, false),
+ do: segments
+end
diff --git a/lib/lightning_web/live/run_live/channel_logs_component.ex b/lib/lightning_web/live/run_live/channel_logs_component.ex
index 343b65ff8bf..a1810e52f3f 100644
--- a/lib/lightning_web/live/run_live/channel_logs_component.ex
+++ b/lib/lightning_web/live/run_live/channel_logs_component.ex
@@ -151,9 +151,13 @@ defmodule LightningWeb.RunLive.ChannelLogsComponent do
<%= for entry <- @page.entries do %>
<.tr id={"request-#{entry.id}"}>
<.td>
-
+ <.link
+ navigate={~p"/projects/#{@project}/history/channels/#{entry.id}"}
+ class="link font-mono"
+ title={entry.request_id}
+ >
{display_short_uuid(entry.request_id)}
-
+
<.td class="text-sm text-gray-700">
{source_event_path(entry)}
diff --git a/lib/lightning_web/plugs/channel_proxy_plug.ex b/lib/lightning_web/plugs/channel_proxy_plug.ex
index dfa0cafc338..3b37e4eb1f6 100644
--- a/lib/lightning_web/plugs/channel_proxy_plug.ex
+++ b/lib/lightning_web/plugs/channel_proxy_plug.ex
@@ -44,6 +44,7 @@ defmodule LightningWeb.ChannelProxyPlug do
:forward_path,
:client_identity,
:auth_header,
+ :destination_credential_id,
client_auth_types: []
]
end
@@ -69,15 +70,15 @@ defmodule LightningWeb.ChannelProxyPlug do
defp do_proxy(conn, channel_id, rest) do
with {:ok, channel} <- fetch_channel_with_telemetry(channel_id),
- :ok <- authenticate_client(conn, channel) do
- proxy_with_auth(conn, channel, rest)
+ {:ok, matched_auth} <- authenticate_client(conn, channel) do
+ proxy_with_auth(conn, channel, rest, matched_auth)
else
:not_found -> error_response(conn, :not_found, "Not Found")
:unauthorized -> error_response(conn, :unauthorized, "Unauthorized")
end
end
- defp proxy_with_auth(conn, channel, rest) do
+ defp proxy_with_auth(conn, channel, rest, matched_auth) do
with {:ok, auth_header} <- resolve_destination_auth(channel),
{:ok, snapshot} <- Channels.get_or_create_current_snapshot(channel) do
client_auth_types =
@@ -93,11 +94,12 @@ defmodule LightningWeb.ChannelProxyPlug do
forward_path: build_forward_path(rest),
client_identity: get_client_identity(conn),
auth_header: auth_header,
- client_auth_types: client_auth_types
+ client_auth_types: client_auth_types,
+ destination_credential_id: destination_credential_id(channel)
}
conn
- |> proxy_upstream(req)
+ |> proxy_upstream(req, matched_auth)
|> halt()
else
{:credential_error, reason} ->
@@ -108,18 +110,36 @@ defmodule LightningWeb.ChannelProxyPlug do
end
end
+ # Extracts the project_credential_id from the channel's destination auth
+ # method; nil when no destination auth (or no such key) is configured.
+ defp destination_credential_id(channel) do
+ case channel.destination_auth_method do
+ %{project_credential_id: id} -> id
+ _ -> nil
+ end
+ end
+
+ # No client auth methods configured: every caller is allowed and there
+ # is no matched method to record on the request.
+ defp authenticate_client(_conn, %{client_webhook_auth_methods: []}) do
+ {:ok, nil}
+ end
+
defp authenticate_client(conn, channel) do
methods = channel.client_webhook_auth_methods
- if methods == [] or
- Auth.valid_key?(conn, methods) or
- Auth.valid_user?(conn, methods) do
- :ok
- else
- :unauthorized
+ case find_matching_auth_method(conn, methods) do
+ %{} = method -> {:ok, method}
+ nil -> :unauthorized
end
end
+ # Returns the first auth method the request satisfies (API key for :api,
+ # basic auth for :basic); nil when none match. Unknown auth types never
+ # match.
+ defp find_matching_auth_method(conn, methods) do
+ Enum.find(methods, fn method ->
+ case method.auth_type do
+ :api -> Auth.valid_key?(conn, [method])
+ :basic -> Auth.valid_user?(conn, [method])
+ _ -> false
+ end
+ end)
+ end
+
defp fetch_channel_with_telemetry(channel_id) do
metadata = %{channel_id: channel_id}
@@ -133,15 +153,19 @@ defmodule LightningWeb.ChannelProxyPlug do
)
end
- defp proxy_upstream(conn, %DestinationRequest{} = req) do
- handler_state = %{
- channel: req.channel,
- snapshot: req.snapshot,
- request_id: req.request_id,
- started_at: DateTime.utc_now(),
- request_path: req.forward_path,
- client_identity: req.client_identity
- }
+ defp proxy_upstream(conn, %DestinationRequest{} = req, matched_auth) do
+ handler_state =
+ %{
+ channel: req.channel,
+ snapshot: req.snapshot,
+ request_id: req.request_id,
+ started_at: DateTime.utc_now(),
+ request_path: req.forward_path,
+ client_identity: req.client_identity,
+ query_string: conn.query_string,
+ destination_credential_id: req.destination_credential_id
+ }
+ |> put_auth_method(matched_auth)
metadata = %{
channel_id: req.channel.id,
@@ -159,7 +183,8 @@ defmodule LightningWeb.ChannelProxyPlug do
path: req.forward_path,
handler: {Lightning.Channels.Handler, handler_state},
strip_headers: build_strip_headers(req.client_auth_types),
- extra_headers: build_extra_headers(conn, req)
+ extra_headers: build_extra_headers(conn, req),
+ collect_timing: true
)
{result, metadata}
@@ -167,6 +192,15 @@ defmodule LightningWeb.ChannelProxyPlug do
)
end
+ # Records which client auth method matched on the handler state; no-op
+ # when the channel required no client auth (matched_auth is nil).
+ defp put_auth_method(state, nil), do: state
+
+ defp put_auth_method(state, %{id: id, auth_type: auth_type}) do
+ Map.merge(state, %{
+ client_webhook_auth_method_id: id,
+ client_auth_type: Atom.to_string(auth_type)
+ })
+ end
+
defp build_extra_headers(conn, %DestinationRequest{} = req) do
xff =
case Plug.Conn.get_req_header(conn, "x-forwarded-for") do
@@ -256,7 +290,8 @@ defmodule LightningWeb.ChannelProxyPlug do
|> Plug.Conn.get_resp_header("x-request-id")
|> List.first(),
forward_path: conn.request_path,
- client_identity: get_client_identity(conn)
+ client_identity: get_client_identity(conn),
+ destination_credential_id: destination_credential_id(channel)
}
record_credential_error(conn, req, error_message)
@@ -280,6 +315,7 @@ defmodule LightningWeb.ChannelProxyPlug do
channel_snapshot_id: req.snapshot.id,
request_id: req.request_id,
client_identity: req.client_identity,
+ destination_credential_id: req.destination_credential_id,
state: :error,
started_at: now,
completed_at: now
diff --git a/lib/lightning_web/router.ex b/lib/lightning_web/router.ex
index b08cac16ed7..e8f64c1fe53 100644
--- a/lib/lightning_web/router.ex
+++ b/lib/lightning_web/router.ex
@@ -241,6 +241,7 @@ defmodule LightningWeb.Router do
live "/history", RunLive.Index, :index
live "/history/channels", RunLive.Index, :channel_logs
+ live "/history/channels/:id", ChannelRequestLive.Show, :show
live "/runs/:id", RunLive.Show, :show
live "/dataclips/:id/show", DataclipLive.Show, :show
diff --git a/mix.exs b/mix.exs
index 59f60fb8079..eec2da69ec5 100644
--- a/mix.exs
+++ b/mix.exs
@@ -162,7 +162,7 @@ defmodule Lightning.MixProject do
if path = System.get_env("PHILTER_PATH") do
{:philter, path: path}
else
- {:philter, "~> 0.2.1"}
+ {:philter, "~> 0.3.0"}
end
end
diff --git a/mix.lock b/mix.lock
index 365ea805715..075c7933ac2 100644
--- a/mix.lock
+++ b/mix.lock
@@ -103,7 +103,7 @@
"parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
"peep": {:hex, :peep, "3.5.0", "9f6ead7b0f2c684494200c8fc02e7e62e8c459afe861b29bd859e4c96f402ed8", [:mix], [{:nimble_options, "~> 1.1", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:plug, "~> 1.16", [hex: :plug, repo: "hexpm", optional: true]}, {:telemetry_metrics, "~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "5a73a99c6e60062415efeb7e536a663387146463a3d3df1417da31fd665ac210"},
"petal_components": {:hex, :petal_components, "3.0.1", "58cd70f9c5e4896ed8e41b095f19770fa56ca0855d99790c4a26b5f04fa52283", [:mix], [{:phoenix, "~> 1.7", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_ecto, "~> 4.4", [hex: :phoenix_ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_html_helpers, "~> 1.0", [hex: :phoenix_html_helpers, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.7", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "1195bc30979284f01a5fa2430e370d8378c635e083179c2b2fdbecf21cce05c1"},
- "philter": {:hex, :philter, "0.2.1", "48239f0913745c1a58bf1691993cbf19fc766e20846ccbe961a211870d1f99c3", [:mix], [{:finch, "~> 0.18", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.7", [hex: :phoenix, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "3bc2ff7a61d08936621544df9b65afae46b4bd4cb9a2412eeace450a762a3ff9"},
+ "philter": {:hex, :philter, "0.3.0", "7142e315cd1265365fa9d5c40a48530135c34b79ea160e9cd1eee1fc7acc297e", [:mix], [{:finch, "~> 0.18", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.7", [hex: :phoenix, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "ddb96add693cb6749f26b7663c1319f32d5e898b8ffad7f6a4c1e5e7ed2e225e"},
"phoenix": {:hex, :phoenix, "1.7.23", "2a86f055b50f3ca2e692f8bc0e757b7bde6a44182476ec9193e337ccb7cf5492", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "98b551a267cbcd0ca4a2bfe05ff2fb3cd68699197a2a3e14504f6b7be758ca9d"},
"phoenix_ecto": {:hex, :phoenix_ecto, "4.6.5", "c4ef322acd15a574a8b1a08eff0ee0a85e73096b53ce1403b6563709f15e1cea", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "26ec3208eef407f31b748cadd044045c6fd485fbff168e35963d2f9dfff28d4b"},
"phoenix_html": {:hex, :phoenix_html, "4.3.0", "d3577a5df4b6954cd7890c84d955c470b5310bb49647f0a114a6eeecc850f7ad", [:mix], [], "hexpm", "3eaa290a78bab0f075f791a46a981bbe769d94bc776869f4f3063a14f30497ad"},
diff --git a/priv/repo/migrations/20260409100001_add_channel_event_detail_columns.exs b/priv/repo/migrations/20260409100001_add_channel_event_detail_columns.exs
new file mode 100644
index 00000000000..bd773332ede
--- /dev/null
+++ b/priv/repo/migrations/20260409100001_add_channel_event_detail_columns.exs
@@ -0,0 +1,13 @@
+defmodule Lightning.Repo.Migrations.AddChannelEventDetailColumns do
+ use Ecto.Migration
+
+ # Columns backing the channel-request detail page: raw query string,
+ # body sizes (bigint), and the Finch send/receive phase durations in
+ # microseconds (integer).
+ def change do
+ alter table(:channel_events) do
+ add :request_query_string, :text
+ add :request_body_size, :bigint
+ add :response_body_size, :bigint
+ add :request_send_us, :integer
+ add :response_duration_us, :integer
+ end
+ end
+end
diff --git a/priv/repo/migrations/20260409100002_convert_channel_event_headers_to_jsonb.exs b/priv/repo/migrations/20260409100002_convert_channel_event_headers_to_jsonb.exs
new file mode 100644
index 00000000000..29598390ad3
--- /dev/null
+++ b/priv/repo/migrations/20260409100002_convert_channel_event_headers_to_jsonb.exs
@@ -0,0 +1,39 @@
+defmodule Lightning.Repo.Migrations.ConvertChannelEventHeadersToJsonb do
+ use Ecto.Migration
+
+ # Converts the header columns from text to jsonb so headers can be
+ # queried structurally.
+ #
+ # NOTE(review): in PostgreSQL, `request_headers::jsonb` RAISES on invalid
+ # JSON rather than returning NULL, so the `ELSE NULL` branch below is
+ # unreachable and a single malformed row aborts the migration — confirm
+ # all existing values are valid JSON (or NULL) before deploying.
+ def up do
+ execute """
+ ALTER TABLE channel_events
+ ALTER COLUMN request_headers TYPE jsonb
+ USING CASE
+ WHEN request_headers IS NULL THEN NULL
+ WHEN request_headers::jsonb IS NOT NULL THEN request_headers::jsonb
+ ELSE NULL
+ END
+ """
+
+ execute """
+ ALTER TABLE channel_events
+ ALTER COLUMN response_headers TYPE jsonb
+ USING CASE
+ WHEN response_headers IS NULL THEN NULL
+ WHEN response_headers::jsonb IS NOT NULL THEN response_headers::jsonb
+ ELSE NULL
+ END
+ """
+ end
+
+ # Reverts to text; jsonb -> text is always a valid cast.
+ def down do
+ execute """
+ ALTER TABLE channel_events
+ ALTER COLUMN request_headers TYPE text
+ USING request_headers::text
+ """
+
+ execute """
+ ALTER TABLE channel_events
+ ALTER COLUMN response_headers TYPE text
+ USING response_headers::text
+ """
+ end
+end
diff --git a/priv/repo/migrations/20260409100003_add_client_auth_tracking_to_channel_requests.exs b/priv/repo/migrations/20260409100003_add_client_auth_tracking_to_channel_requests.exs
new file mode 100644
index 00000000000..df4eb79d0d0
--- /dev/null
+++ b/priv/repo/migrations/20260409100003_add_client_auth_tracking_to_channel_requests.exs
@@ -0,0 +1,14 @@
+defmodule Lightning.Repo.Migrations.AddClientAuthTrackingToChannelRequests do
+ use Ecto.Migration
+
+ # Tracks which client webhook auth method (and type) authenticated each
+ # request. nilify_all keeps request rows when the auth method is deleted.
+ def change do
+ alter table(:channel_requests) do
+ add :client_webhook_auth_method_id,
+ references(:webhook_auth_methods, type: :binary_id, on_delete: :nilify_all)
+
+ add :client_auth_type, :string
+ end
+
+ create index(:channel_requests, [:client_webhook_auth_method_id])
+ end
+end
diff --git a/priv/repo/migrations/20260410131136_rename_timing_fields_to_microseconds.exs b/priv/repo/migrations/20260410131136_rename_timing_fields_to_microseconds.exs
new file mode 100644
index 00000000000..bf47ab281d6
--- /dev/null
+++ b/priv/repo/migrations/20260410131136_rename_timing_fields_to_microseconds.exs
@@ -0,0 +1,26 @@
+defmodule Lightning.Repo.Migrations.RenameTimingFieldsToMicroseconds do
+ use Ecto.Migration
+
+ # Renames the ms timing columns to *_us, converts stored values from
+ # milliseconds to microseconds, and adds the new Finch phase columns.
+ # flush/0 forces the queued renames to run before the UPDATEs execute.
+ #
+ # NOTE(review): per the Ecto docs, flush/0 inside change/0 raises when
+ # the migration is rolled back — verify, or split this into explicit
+ # up/down callbacks to keep it reversible.
+ def change do
+ rename table(:channel_events), :latency_ms, to: :latency_us
+ rename table(:channel_events), :ttfb_ms, to: :ttfb_us
+
+ flush()
+
+ execute(
+ "UPDATE channel_events SET latency_us = latency_us * 1000",
+ "UPDATE channel_events SET latency_us = latency_us / 1000"
+ )
+
+ execute(
+ "UPDATE channel_events SET ttfb_us = ttfb_us * 1000",
+ "UPDATE channel_events SET ttfb_us = ttfb_us / 1000"
+ )
+
+ alter table(:channel_events) do
+ add :queue_us, :integer
+ add :connect_us, :integer
+ add :reused_connection, :boolean
+ end
+ end
+end
diff --git a/priv/repo/migrations/20260423133517_add_destination_credential_id_to_channel_requests.exs b/priv/repo/migrations/20260423133517_add_destination_credential_id_to_channel_requests.exs
new file mode 100644
index 00000000000..a3d82ac08cf
--- /dev/null
+++ b/priv/repo/migrations/20260423133517_add_destination_credential_id_to_channel_requests.exs
@@ -0,0 +1,15 @@
+defmodule Lightning.Repo.Migrations.AddDestinationCredentialIdToChannelRequests do
+ use Ecto.Migration
+
+ # Records which project credential was used as destination auth for each
+ # request; nilify_all keeps request rows when the credential is deleted.
+ def change do
+ alter table(:channel_requests) do
+ add :destination_credential_id,
+ references(:project_credentials,
+ type: :binary_id,
+ on_delete: :nilify_all
+ )
+ end
+
+ create index(:channel_requests, [:destination_credential_id])
+ end
+end
diff --git a/test/lightning/channels/channel_requests_test.exs b/test/lightning/channels/channel_requests_test.exs
index 105173d1c9f..cb9f896b85a 100644
--- a/test/lightning/channels/channel_requests_test.exs
+++ b/test/lightning/channels/channel_requests_test.exs
@@ -284,6 +284,111 @@ defmodule Lightning.Channels.ChannelRequestsTest do
end
end
+ # ---------------------------------------------------------------
+ # Phase 1a contract tests — client auth method tracking on requests
+ # ---------------------------------------------------------------
+ #
+ # These tests define the target interface after:
+ # - D3: client_webhook_auth_method_id and client_auth_type on channel_requests
+ #
+ # They will not compile/pass until Phase 1b implements the changes.
+
+ describe "ChannelRequest changeset — auth method fields" do
+ test "accepts client_webhook_auth_method_id and client_auth_type" do
+ channel = insert(:channel)
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+ auth_method = insert(:webhook_auth_method, project: channel.project)
+
+ attrs = %{
+ channel_id: channel.id,
+ channel_snapshot_id: snapshot.id,
+ request_id: "req-auth-test",
+ state: :success,
+ started_at: DateTime.utc_now(),
+ client_webhook_auth_method_id: auth_method.id,
+ client_auth_type: "api"
+ }
+
+ changeset = ChannelRequest.changeset(%ChannelRequest{}, attrs)
+ assert changeset.valid?
+
+ {:ok, request} = Repo.insert(changeset)
+ assert request.client_webhook_auth_method_id == auth_method.id
+ assert request.client_auth_type == "api"
+ end
+
+ test "auth method fields are nullable" do
+ channel = insert(:channel)
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ attrs = %{
+ channel_id: channel.id,
+ channel_snapshot_id: snapshot.id,
+ request_id: "req-no-auth",
+ state: :success,
+ started_at: DateTime.utc_now()
+ }
+
+ {:ok, request} =
+ ChannelRequest.changeset(%ChannelRequest{}, attrs) |> Repo.insert()
+
+ assert request.client_webhook_auth_method_id == nil
+ assert request.client_auth_type == nil
+ end
+
+ test "belongs_to client_webhook_auth_method loads correctly" do
+ channel = insert(:channel)
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+ auth_method = insert(:webhook_auth_method, project: channel.project)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ client_webhook_auth_method_id: auth_method.id,
+ client_auth_type: "basic"
+ )
+
+ loaded =
+ ChannelRequest
+ |> Repo.get!(request.id)
+ |> Repo.preload(:client_webhook_auth_method)
+
+ assert loaded.client_webhook_auth_method.id == auth_method.id
+ assert loaded.client_auth_type == "basic"
+ end
+ end
+
+ describe "client_webhook_auth_method_id nilification on delete" do
+ test "FK is nilified when auth method is deleted, client_auth_type survives" do
+ channel = insert(:channel)
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+ auth_method = insert(:webhook_auth_method, project: channel.project)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ client_webhook_auth_method_id: auth_method.id,
+ client_auth_type: "api"
+ )
+
+ # Verify FK is set
+ assert Repo.get!(ChannelRequest, request.id).client_webhook_auth_method_id ==
+ auth_method.id
+
+ # Delete the auth method
+ Repo.delete!(auth_method)
+
+ # FK should be nilified by on_delete: :nilify_all
+ reloaded = Repo.get!(ChannelRequest, request.id)
+ assert reloaded.client_webhook_auth_method_id == nil
+
+ # client_auth_type is a denormalized snapshot — it survives deletion
+ assert reloaded.client_auth_type == "api"
+ end
+ end
+
describe "delete_channel/2 with requests" do
test "removes requests before deleting channel" do
user = insert(:user)
diff --git a/test/lightning/channels/channel_stats_test.exs b/test/lightning/channels/channel_stats_test.exs
index 696540386c6..9d72fdb7db4 100644
--- a/test/lightning/channels/channel_stats_test.exs
+++ b/test/lightning/channels/channel_stats_test.exs
@@ -2,7 +2,6 @@ defmodule Lightning.Channels.ChannelStatsTest do
use Lightning.DataCase, async: true
alias Lightning.Channels
- alias Lightning.Channels.ChannelRequest
alias Lightning.Channels.SearchParams
describe "get_channel_stats_for_project/1" do
@@ -20,29 +19,23 @@ defmodule Lightning.Channels.ChannelStatsTest do
{:ok, snapshot1} = Channels.get_or_create_current_snapshot(channel1)
{:ok, snapshot2} = Channels.get_or_create_current_snapshot(channel2)
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel1.id,
- channel_snapshot_id: snapshot1.id,
- request_id: "stats-r1",
- state: :success,
- started_at: DateTime.utc_now()
- })
-
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel1.id,
- channel_snapshot_id: snapshot1.id,
- request_id: "stats-r2",
- state: :success,
- started_at: DateTime.utc_now()
- })
-
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel2.id,
- channel_snapshot_id: snapshot2.id,
- request_id: "stats-r3",
- state: :success,
- started_at: DateTime.utc_now()
- })
+ insert(:channel_request,
+ channel: channel1,
+ channel_snapshot: snapshot1,
+ state: :success
+ )
+
+ insert(:channel_request,
+ channel: channel1,
+ channel_snapshot: snapshot1,
+ state: :success
+ )
+
+ insert(:channel_request,
+ channel: channel2,
+ channel_snapshot: snapshot2,
+ state: :success
+ )
assert %{total_channels: 2, total_requests: 3} =
Channels.get_channel_stats_for_project(project.id)
@@ -53,13 +46,11 @@ defmodule Lightning.Channels.ChannelStatsTest do
other_channel = insert(:channel)
{:ok, snapshot} = Channels.get_or_create_current_snapshot(other_channel)
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: other_channel.id,
- channel_snapshot_id: snapshot.id,
- request_id: "stats-other-r1",
- state: :success,
- started_at: DateTime.utc_now()
- })
+ insert(:channel_request,
+ channel: other_channel,
+ channel_snapshot: snapshot,
+ state: :success
+ )
assert %{total_requests: 0} =
Channels.get_channel_stats_for_project(project.id)
diff --git a/test/lightning/channels/handler_test.exs b/test/lightning/channels/handler_test.exs
index 23aa5c5fec2..897af8fe8ce 100644
--- a/test/lightning/channels/handler_test.exs
+++ b/test/lightning/channels/handler_test.exs
@@ -122,7 +122,11 @@ defmodule Lightning.Channels.HandlerTest do
end
test "creates ChannelEvent with correct fields", %{state: state} do
- result = finished_result(status: 200, duration_us: 50_000)
+ result =
+ finished_result(
+ status: 200,
+ timing: %{total_us: 50_000, send_us: 2_000, recv_us: 1_000}
+ )
assert {:ok, _state} = Handler.handle_response_finished(result, state)
@@ -132,8 +136,8 @@ defmodule Lightning.Channels.HandlerTest do
assert event.request_method == state.request_method
assert event.request_path == "/test/path"
assert event.response_status == 200
- assert event.latency_ms == 50
- assert event.ttfb_ms == 10
+ assert event.latency_us == 50_000
+ assert event.ttfb_us == 10_000
assert event.error_message == nil
end
@@ -190,6 +194,242 @@ defmodule Lightning.Channels.HandlerTest do
end
end
+ # ---------------------------------------------------------------
+ # Phase 1a contract tests — Philter 0.3.0 adaptation + new fields
+ # ---------------------------------------------------------------
+ #
+ # These tests define the target interface after:
+ # - D1: New columns on channel_events (body sizes, durations, query string)
+ # - D2: Headers text → jsonb migration
+ # - D4: Handler reads from Philter 0.3.0 result structure
+ #
+ # They will not compile/pass until Phase 1b implements the changes.
+
+ describe "ChannelEvent changeset — new fields" do
+ test "accepts body size fields" do
+ attrs = %{
+ channel_request_id: Ecto.UUID.generate(),
+ type: :destination_response,
+ request_body_size: 1024,
+ response_body_size: 2048
+ }
+
+ changeset = ChannelEvent.changeset(%ChannelEvent{}, attrs)
+ assert changeset.valid?
+ assert changeset.changes.request_body_size == 1024
+ assert changeset.changes.response_body_size == 2048
+ end
+
+ test "accepts per-direction duration fields" do
+ attrs = %{
+ channel_request_id: Ecto.UUID.generate(),
+ type: :destination_response,
+ request_send_us: 3_500,
+ response_duration_us: 8_000
+ }
+
+ changeset = ChannelEvent.changeset(%ChannelEvent{}, attrs)
+ assert changeset.valid?
+ assert changeset.changes.request_send_us == 3_500
+ assert changeset.changes.response_duration_us == 8_000
+ end
+
+ test "accepts request_query_string" do
+ attrs = %{
+ channel_request_id: Ecto.UUID.generate(),
+ type: :destination_response,
+ request_query_string: "page=1&limit=10"
+ }
+
+ changeset = ChannelEvent.changeset(%ChannelEvent{}, attrs)
+ assert changeset.valid?
+ assert changeset.changes.request_query_string == "page=1&limit=10"
+ end
+
+ test "new fields are all nullable" do
+ attrs = %{
+ channel_request_id: Ecto.UUID.generate(),
+ type: :error,
+ error_message: "nxdomain"
+ }
+
+ changeset = ChannelEvent.changeset(%ChannelEvent{}, attrs)
+ assert changeset.valid?
+ refute Map.has_key?(changeset.changes, :request_body_size)
+ refute Map.has_key?(changeset.changes, :response_body_size)
+ refute Map.has_key?(changeset.changes, :request_send_us)
+ refute Map.has_key?(changeset.changes, :response_duration_us)
+ refute Map.has_key?(changeset.changes, :request_query_string)
+ end
+ end
+
+ describe "persist_completion — Philter 0.3.0 fields" do
+ setup %{state: state} do
+ metadata = request_metadata()
+ {:ok, state} = Handler.handle_request_started(metadata, state)
+
+ state =
+ Map.merge(state, %{
+ ttfb_us: 10_000,
+ response_status: 200,
+ response_headers: [{"content-type", "text/plain"}]
+ })
+
+ %{state: state}
+ end
+
+ test "uses timing.total_us for latency_us", %{state: state} do
+ result = philter_result(timing: %{total_us: 50_000, send_us: 2_000})
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.latency_us == 50_000
+ end
+
+ test "persists request_send_us from timing.send_us", %{state: state} do
+ result = philter_result(timing: %{total_us: 50_000, send_us: 3_500})
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.request_send_us == 3_500
+ end
+
+ test "persists response_duration_us from timing.recv_us",
+ %{state: state} do
+ result =
+ philter_result(
+ timing: %{total_us: 50_000, send_us: 2_000, recv_us: 8_000}
+ )
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.response_duration_us == 8_000
+ end
+
+ test "persists body sizes from observations", %{state: state} do
+ result =
+ philter_result(
+ request_observation: %{
+ hash: "req123",
+ size: 1024,
+ body: nil,
+ preview: "request body"
+ },
+ response_observation: %{
+ hash: "resp123",
+ size: 2048,
+ body: nil,
+ preview: "response body"
+ }
+ )
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.request_body_size == 1024
+ assert event.response_body_size == 2048
+ end
+
+ test "persists query string from handler state", %{state: state} do
+ state = Map.put(state, :query_string, "page=1&limit=10")
+ result = philter_result()
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.request_query_string == "page=1&limit=10"
+ end
+
+ test "nil phase timings when collect_timing is disabled", %{state: state} do
+ result =
+ philter_result(timing: %{total_us: 50_000, send_us: nil, recv_us: nil})
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.request_send_us == nil
+ assert event.response_duration_us == nil
+ assert event.latency_us == 50_000
+ end
+ end
+
+ describe "header encoding — native jsonb" do
+ setup %{state: state} do
+ metadata =
+ request_metadata(
+ headers: [
+ {"content-type", "application/json"},
+ {"x-custom", "value"}
+ ]
+ )
+
+ {:ok, state} = Handler.handle_request_started(metadata, state)
+
+ state =
+ Map.merge(state, %{
+ ttfb_us: 10_000,
+ response_status: 200,
+ response_headers: [
+ {"content-type", "text/plain"},
+ {"x-resp", "val"}
+ ]
+ })
+
+ %{state: state}
+ end
+
+ test "request headers round-trip as list without Jason.decode!", %{
+ state: state
+ } do
+ result = philter_result()
+ Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+
+ # After jsonb migration, headers are native lists, not JSON strings
+ assert is_list(event.request_headers)
+
+ assert event.request_headers == [
+ ["content-type", "application/json"],
+ ["x-custom", "value"]
+ ]
+ end
+
+ test "response headers round-trip as list without Jason.decode!", %{
+ state: state
+ } do
+ result = philter_result()
+ Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+
+ assert is_list(event.response_headers)
+
+ assert event.response_headers == [
+ ["content-type", "text/plain"],
+ ["x-resp", "val"]
+ ]
+ end
+
+ test "nil headers remain nil", %{state: state} do
+ state = Map.delete(state, :response_headers)
+
+ result =
+ philter_result(
+ status: nil,
+ error: %Mint.TransportError{reason: :econnrefused}
+ )
+
+ Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.response_headers == nil
+ end
+ end
+
# Helpers
defp request_metadata(overrides \\ []) do
@@ -213,9 +453,37 @@ defmodule Lightning.Channels.HandlerTest do
hash: "abc123",
size: 100,
body: nil,
- preview: "test body",
- duration_us: 1000,
- time_to_first_byte_us: 500
+ preview: "test body"
+ }
+
+ %{
+ request_observation:
+ Keyword.get(overrides, :request_observation, observation),
+ response_observation:
+ Keyword.get(overrides, :response_observation, observation),
+ error: Keyword.get(overrides, :error, nil),
+ upstream_url:
+ Keyword.get(overrides, :upstream_url, "http://localhost:4999"),
+ method: Keyword.get(overrides, :method, "GET"),
+ status: Keyword.get(overrides, :status, 200),
+ timing:
+ Keyword.get(overrides, :timing, %{
+ total_us: 10_000,
+ send_us: 2_000,
+ recv_us: 1_000
+ })
+ }
+ end
+
+ # Philter 0.3.0 result format:
+ # - Observations are content-only (hash, size, preview, body)
+ # - All timing lives in the top-level timing map
+ defp philter_result(overrides \\ []) do
+ observation = %{
+ hash: "abc123",
+ size: 100,
+ body: nil,
+ preview: "test body"
}
%{
@@ -228,7 +496,12 @@ defmodule Lightning.Channels.HandlerTest do
Keyword.get(overrides, :upstream_url, "http://localhost:4999"),
method: Keyword.get(overrides, :method, "GET"),
status: Keyword.get(overrides, :status, 200),
- duration_us: Keyword.get(overrides, :duration_us, 10_000)
+ timing:
+ Keyword.get(overrides, :timing, %{
+ total_us: 10_000,
+ send_us: 2_000,
+ recv_us: 1_000
+ })
}
end
end
diff --git a/test/lightning/channels_test.exs b/test/lightning/channels_test.exs
index 7500a759c3c..9f32906fe49 100644
--- a/test/lightning/channels_test.exs
+++ b/test/lightning/channels_test.exs
@@ -6,7 +6,6 @@ defmodule Lightning.ChannelsTest do
alias Lightning.Auditing.Audit
alias Lightning.Channels
alias Lightning.Channels.Channel
- alias Lightning.Channels.ChannelRequest
alias Lightning.Channels.ChannelSnapshot
describe "list_channels_for_project/1" do
@@ -43,21 +42,19 @@ defmodule Lightning.ChannelsTest do
t1 = ~U[2025-01-01 10:00:00.000000Z]
t2 = ~U[2025-01-02 12:00:00.000000Z]
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel.id,
- channel_snapshot_id: snapshot.id,
- request_id: "req-stats-1",
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
state: :success,
started_at: t1
- })
+ )
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel.id,
- channel_snapshot_id: snapshot.id,
- request_id: "req-stats-2",
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
state: :success,
started_at: t2
- })
+ )
results = Channels.list_channels_for_project_with_stats(project.id)
@@ -79,13 +76,12 @@ defmodule Lightning.ChannelsTest do
{:ok, snapshot_b} = Channels.get_or_create_current_snapshot(channel_b)
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel_b.id,
- channel_snapshot_id: snapshot_b.id,
- request_id: "req-stats-3",
+ insert(:channel_request,
+ channel: channel_b,
+ channel_snapshot: snapshot_b,
state: :success,
started_at: ~U[2025-06-01 00:00:00.000000Z]
- })
+ )
results = Channels.list_channels_for_project_with_stats(project.id)
@@ -246,6 +242,34 @@ defmodule Lightning.ChannelsTest do
assert audit.actor_id == user.id
end
+ test "returns {:ok, channel} when submitted with no real changes", %{
+ user: user
+ } do
+ channel = insert(:channel)
+
+ # Pass back the current values — empty changes map. Previously this
+ # crashed with FunctionClauseError because Audit.event/4 returned
+ # :no_changes and that was piped into Multi.insert/3.
+ assert {:ok, unchanged} =
+ Channels.update_channel(
+ channel,
+ %{name: channel.name, destination_url: channel.destination_url},
+ actor: user
+ )
+
+ assert unchanged.id == channel.id
+ assert unchanged.lock_version == channel.lock_version
+
+ # No audit row was written for the no-op save
+ assert [] ==
+ Repo.all(
+ from a in Audit,
+ where:
+ a.item_id == ^channel.id and a.item_type == "channel" and
+ a.event == "updated"
+ )
+ end
+
test "passing nil for destination_auth_method removes the join record",
%{user: user} do
project = insert(:project)
@@ -433,4 +457,91 @@ defmodule Lightning.ChannelsTest do
assert snapshot2.name == "updated-name"
end
end
+
+ describe "get_channel_request_for_project/2" do
+ test "returns channel request with preloads when project matches" do
+ project = insert(:project)
+ user = insert(:user)
+
+ webhook_auth_method =
+ insert(:webhook_auth_method, project: project, auth_type: :api)
+
+ credential =
+ insert(:credential, user: user, name: "dest-cred", schema: "http")
+
+ project_credential =
+ insert(:project_credential, project: project, credential: credential)
+
+ channel = insert(:channel, project: project)
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ state: :success,
+ started_at: DateTime.utc_now(),
+ client_webhook_auth_method_id: webhook_auth_method.id,
+ client_auth_type: "api",
+ destination_credential_id: project_credential.id
+ )
+
+ event =
+ insert(:channel_event,
+ channel_request: request,
+ request_path: "/test",
+ latency_us: 100_000
+ )
+
+ result = Channels.get_channel_request_for_project(project.id, request.id)
+
+ assert result.id == request.id
+ assert result.channel.id == channel.id
+ assert result.channel_snapshot.id == snapshot.id
+
+ assert length(result.channel_events) == 1
+ assert hd(result.channel_events).id == event.id
+
+ # Client and destination auth tracking are preloaded (no N+1).
+ assert result.client_webhook_auth_method.id == webhook_auth_method.id
+ assert result.destination_credential.id == project_credential.id
+ assert result.destination_credential.credential.name == "dest-cred"
+ end
+
+ test "returns nil when channel request belongs to a different project" do
+ project_a = insert(:project)
+ project_b = insert(:project)
+ channel = insert(:channel, project: project_a)
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ state: :success,
+ started_at: DateTime.utc_now()
+ )
+
+ assert Channels.get_channel_request_for_project(project_b.id, request.id) ==
+ nil
+ end
+
+ test "returns nil for non-existent request ID" do
+ project = insert(:project)
+
+ assert Channels.get_channel_request_for_project(
+ project.id,
+ Ecto.UUID.generate()
+ ) == nil
+ end
+
+ test "returns nil for invalid UUID" do
+ project = insert(:project)
+
+ assert Channels.get_channel_request_for_project(
+ project.id,
+ "not-a-valid-uuid"
+ ) == nil
+ end
+ end
end
diff --git a/test/lightning_web/live/channel_live/form_test.exs b/test/lightning_web/live/channel_live/form_test.exs
index 7ab177cebae..c862d053c8f 100644
--- a/test/lightning_web/live/channel_live/form_test.exs
+++ b/test/lightning_web/live/channel_live/form_test.exs
@@ -156,23 +156,22 @@ defmodule LightningWeb.ChannelLive.FormTest do
{:ok, view, html} =
live(conn, ~p"/projects/#{project.id}/channels/new")
- # Fields appear in this order: Name, Destination URL,
- # Destination Credential, Enabled, Client Credentials
+ # Fields appear in this order: Name, Enabled, Destination URL,
+ # Destination Credential, Client Credentials
name_pos = :binary.match(html, "Name") |> elem(0)
+ enabled_pos = :binary.match(html, "Enabled") |> elem(0)
dest_url_pos = :binary.match(html, "Destination URL") |> elem(0)
dest_cred_pos =
:binary.match(html, "Destination Credential") |> elem(0)
- enabled_pos = :binary.match(html, "Enabled") |> elem(0)
-
client_cred_pos =
:binary.match(html, "Client Credentials") |> elem(0)
- assert name_pos < dest_url_pos
+ assert name_pos < enabled_pos
+ assert enabled_pos < dest_url_pos
assert dest_url_pos < dest_cred_pos
- assert dest_cred_pos < enabled_pos
- assert enabled_pos < client_cred_pos
+ assert dest_cred_pos < client_cred_pos
# Sublabels
assert html =~ "The service OpenFn will forward requests to"
diff --git a/test/lightning_web/live/channel_request_live/helpers_test.exs b/test/lightning_web/live/channel_request_live/helpers_test.exs
new file mode 100644
index 00000000000..c8f37ab573c
--- /dev/null
+++ b/test/lightning_web/live/channel_request_live/helpers_test.exs
@@ -0,0 +1,83 @@
+defmodule LightningWeb.ChannelRequestLive.HelpersTest do
+  use ExUnit.Case, async: true
+
+  alias LightningWeb.ChannelRequestLive.Helpers
+
+  # Expected message fragment per transport-level error code.
+  @transport_messages %{
+    "nxdomain" => "DNS lookup failed",
+    "econnrefused" => "Connection refused",
+    "ehostunreach" => "Host unreachable",
+    "enetunreach" => "Network unreachable",
+    "closed" => "Connection closed unexpectedly",
+    "econnreset" => "Connection reset",
+    "econnaborted" => "Connection aborted",
+    "epipe" => "Broken pipe",
+    "connect_timeout" => "Connection timed out",
+    "response_timeout" => "Response timed out",
+    "timeout" => "Request timed out"
+  }
+
+  # Expected message fragment per credential-level error code.
+  @credential_messages %{
+    "credential_missing_auth_fields" => "missing required authentication fields",
+    "credential_environment_not_found" =>
+      "credential environment could not be found",
+    "oauth_refresh_failed" => "OAuth token refresh failed",
+    "oauth_reauthorization_required" =>
+      "OAuth credential needs to be re-authorized"
+  }
+
+  describe "humanize_error/1" do
+    test "maps transport error codes to human messages" do
+      for {code, fragment} <- @transport_messages do
+        assert Helpers.humanize_error(code) =~ fragment,
+               "expected message for #{code} to mention #{inspect(fragment)}"
+      end
+    end
+
+    test "maps credential error codes to human messages" do
+      for {code, fragment} <- @credential_messages do
+        assert Helpers.humanize_error(code) =~ fragment,
+               "expected message for #{code} to mention #{inspect(fragment)}"
+      end
+    end
+
+    test "handles unsupported_credential_schema with dynamic name" do
+      message = Helpers.humanize_error("unsupported_credential_schema:my_schema")
+
+      assert message =~ "Unsupported credential type"
+      assert message =~ "my_schema"
+    end
+
+    test "passes through unknown error codes unchanged" do
+      assert Helpers.humanize_error("some_unknown_error") ==
+               "some_unknown_error"
+    end
+  end
+
+  describe "error_category/1" do
+    test "classifies transport errors" do
+      for code <- Map.keys(@transport_messages) do
+        assert Helpers.error_category(code) == :transport,
+               "expected #{code} to be :transport"
+      end
+    end
+
+    test "classifies credential errors" do
+      for code <- Map.keys(@credential_messages) do
+        assert Helpers.error_category(code) == :credential,
+               "expected #{code} to be :credential"
+      end
+    end
+
+    test "classifies dynamic unsupported schema codes as credential" do
+      assert Helpers.error_category("unsupported_credential_schema:foo") ==
+               :credential
+    end
+
+    test "returns nil for unknown error codes" do
+      assert Helpers.error_category("something_else") == nil
+    end
+  end
+end
diff --git a/test/lightning_web/live/channel_request_live/show_test.exs b/test/lightning_web/live/channel_request_live/show_test.exs
new file mode 100644
index 00000000000..d2b8e8bea4c
--- /dev/null
+++ b/test/lightning_web/live/channel_request_live/show_test.exs
@@ -0,0 +1,660 @@
+defmodule LightningWeb.ChannelRequestLive.ShowTest do
+ use LightningWeb.ConnCase, async: true
+
+ import Phoenix.LiveViewTest
+ import Lightning.Factories
+
+ alias Lightning.Channels
+
+ setup :stub_rate_limiter_ok
+
+ defp enable_experimental_features(%{user: user}) do
+ Lightning.Accounts.update_user_preferences(user, %{
+ "experimental_features" => true
+ })
+
+ :ok
+ end
+
+ defp create_channel_request(project, attrs \\ %{}) do
+ attrs = Map.new(attrs)
+
+ channel =
+ Map.get_lazy(attrs, :channel, fn ->
+ insert(:channel, project: project)
+ end)
+
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ state: Map.get(attrs, :state, :success),
+ client_identity: Map.get(attrs, :client_identity, "192.168.1.1"),
+ client_auth_type: Map.get(attrs, :client_auth_type, "api"),
+ started_at: Map.get(attrs, :started_at, ~U[2026-04-10 10:00:00.000000Z]),
+ completed_at:
+ Map.get(attrs, :completed_at, ~U[2026-04-10 10:00:00.350000Z])
+ )
+
+ {request, channel, snapshot}
+ end
+
+ defp detail_path(project, request) do
+ ~p"/projects/#{project.id}/history/channels/#{request.id}"
+ end
+
+ describe "feature gate" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+
+ test "redirects when experimental features are disabled", %{
+ conn: conn,
+ project: project
+ } do
+ {request, _channel, _snapshot} = create_channel_request(project)
+
+ assert {:error, {:redirect, _}} =
+ live(conn, detail_path(project, request))
+ end
+ end
+
+ describe "detail page — success state" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "renders summary card, metadata, headers, and body previews", %{
+ conn: conn,
+ project: project
+ } do
+ {request, channel, _snapshot} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: request,
+ request_query_string: "format=json"
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # Summary card
+ assert html =~ "POST"
+ assert html =~ "/api/v1/data"
+ assert html =~ "format=json"
+ assert html =~ "200"
+ assert html =~ "Success"
+ assert html =~ channel.name
+
+ # Metadata
+ assert html =~ "192.168.1.1"
+ assert html =~ "api"
+ assert html =~ String.slice(request.id, 0..7)
+ assert html =~ "350"
+ # Destination URL from channel
+ assert html =~ channel.destination_url
+ # Timestamps
+ assert html =~ "2026"
+ assert html =~ "10:00"
+
+ # Request headers
+ assert html =~ "content-type"
+ assert html =~ "authorization"
+ assert html =~ "[REDACTED]"
+
+ # Body previews (quotes are HTML-entity-encoded by LiveView's test DOM serializer)
+ assert html =~ "key"
+ assert html =~ "value"
+ assert html =~ "status"
+ assert html =~ "ok"
+ end
+ end
+
+ describe "detail page — error state" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "renders humanized error and raw string for transport error", %{
+ conn: conn,
+ project: project
+ } do
+ {request, _channel, _snapshot} =
+ create_channel_request(project, state: :error)
+
+ insert(:channel_error_event,
+ channel_request: request,
+ error_message: "econnrefused",
+ latency_us: 100_000
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ assert html =~ "Connection refused"
+ assert html =~ "econnrefused"
+ end
+
+ test "renders credential error with appropriate messaging", %{
+ conn: conn,
+ project: project
+ } do
+ {request, _channel, _snapshot} =
+ create_channel_request(project, state: :error)
+
+ insert(:channel_error_event,
+ channel_request: request,
+ error_message: "credential_missing_auth_fields"
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ assert html =~ "missing required authentication fields"
+ assert html =~ "credential_missing_auth_fields"
+ end
+ end
+
+ describe "detail page — timing section" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "renders full nested timeline with all Finch phases, overhead, and reused connection",
+ %{conn: conn, project: project} do
+ # --- Full phases with overhead ---
+ {req1, _ch1, _snap1} = create_channel_request(project)
+
+ # inner_sum = 2+15+5+158+65 = 245ms, latency = 260ms => 15ms overhead
+ insert(:channel_event,
+ channel_request: req1,
+ queue_us: 2_000,
+ connect_us: 15_000,
+ request_send_us: 5_000,
+ ttfb_us: 180_000,
+ response_duration_us: 65_000,
+ latency_us: 260_000
+ )
+
+ {:ok, view1, _html} = live(conn, detail_path(project, req1))
+ html1 = render(view1)
+
+ # Timing section present with bookend labels
+ assert html1 =~ ~s(id="timing-section")
+ assert html1 =~ "0 ms"
+ assert html1 =~ "260 ms"
+
+ # Phase segment title attributes (tooltip text)
+ assert html1 =~ ~s(title="Queue: 2 ms")
+ assert html1 =~ ~s(title="Connect: 15 ms")
+ assert html1 =~ ~s(title="Send: 5 ms")
+ assert html1 =~ ~s(title="Processing: 158 ms")
+ assert html1 =~ ~s(title="Recv: 65 ms")
+
+ # TTFB marker and legend with overhead swatch
+ assert html1 =~ "TTFB: 180 ms"
+ assert html1 =~ "Proxy overhead"
+
+ # --- Reused connection ---
+ {req2, _ch2, _snap2} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: req2,
+ reused_connection: true,
+ queue_us: 1_000,
+ connect_us: 0,
+ request_send_us: 4_000,
+ ttfb_us: 120_000,
+ response_duration_us: 30_000,
+ latency_us: 155_000
+ )
+
+ {:ok, view2, _html} = live(conn, detail_path(project, req2))
+ html2 = render(view2)
+
+ assert html2 =~ ~s(id="timing-section")
+ assert html2 =~ "(reused)"
+
+ # --- Processing segment from nil queue/connect ---
+ {req3, _ch3, _snap3} = create_channel_request(project)
+
+ # wait = ttfb - 0 - 0 - send = 200k - 10k = 190k
+ insert(:channel_event,
+ channel_request: req3,
+ queue_us: nil,
+ connect_us: nil,
+ request_send_us: 10_000,
+ ttfb_us: 200_000,
+ response_duration_us: 50_000,
+ latency_us: 260_000
+ )
+
+ {:ok, view3, _html} = live(conn, detail_path(project, req3))
+ html3 = render(view3)
+
+ assert html3 =~ ~s(title="Processing: 190 ms")
+ end
+
+ test "degrades gracefully through partial and minimal tiers",
+ %{conn: conn, project: project} do
+ # Partial tier: TTFB + latency only => TTFB/Download segments
+ {req1, _ch1, _snap1} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: req1,
+ request_send_us: nil,
+ response_duration_us: nil,
+ ttfb_us: 280_000,
+ latency_us: 350_000
+ )
+
+ {:ok, view1, _html} = live(conn, detail_path(project, req1))
+ html1 = render(view1)
+
+ assert html1 =~ ~s(title="TTFB: 280 ms")
+ assert html1 =~ ~s(title="Download: 70 ms")
+ assert html1 =~ "350 ms"
+ refute html1 =~ "Proxy overhead"
+
+ # Minimal tier: only latency_us => single Total bar
+ {req2, _ch2, _snap2} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: req2,
+ request_send_us: nil,
+ response_duration_us: nil,
+ ttfb_us: nil,
+ latency_us: 420_000
+ )
+
+ {:ok, view2, _html} = live(conn, detail_path(project, req2))
+ html2 = render(view2)
+
+ assert html2 =~ ~s(title="Total: 420 ms")
+ assert html2 =~ "420 ms"
+ end
+
+ test "positions TTFB marker on the inner-phase scale, not latency",
+ %{conn: conn, project: project} do
+ # inner_total = queue+connect+send+wait+recv
+ # = 10 + 20 + 5 + (ttfb - 10 - 20 - 5) + recv
+ # = ttfb + recv
+ # With ttfb=100ms and recv=100ms => inner_total=200ms => 50%
+ # latency_us is deliberately different (250ms) to prove the marker
+ # is scaled against inner_total, not total_us.
+ {req_half, _ch, _snap} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: req_half,
+ queue_us: 10_000,
+ connect_us: 20_000,
+ request_send_us: 5_000,
+ ttfb_us: 100_000,
+ response_duration_us: 100_000,
+ latency_us: 250_000
+ )
+
+ {:ok, view_half, _html} = live(conn, detail_path(project, req_half))
+ html_half = render(view_half)
+
+ assert html_half =~ ~s(style="left: 50.0%")
+
+ # Edge case: ttfb == inner_total => marker at 100%.
+ # inner_total = ttfb + recv, so set recv = 0 (response_duration_us = 0).
+ {req_full, _ch2, _snap2} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: req_full,
+ queue_us: 10_000,
+ connect_us: 20_000,
+ request_send_us: 5_000,
+ ttfb_us: 100_000,
+ response_duration_us: 0,
+ latency_us: 150_000
+ )
+
+ {:ok, view_full, _html} = live(conn, detail_path(project, req_full))
+ html_full = render(view_full)
+
+ assert html_full =~ ~s(style="left: 100.0%")
+ end
+
+ test "shows single bar for transport errors, hidden for credential errors",
+ %{conn: conn, project: project} do
+ {req_transport, _ch1, _snap1} =
+ create_channel_request(project, state: :timeout)
+
+ insert(:channel_error_event,
+ channel_request: req_transport,
+ error_message: "response_timeout",
+ latency_us: 30_000_000
+ )
+
+ {:ok, view1, _html} = live(conn, detail_path(project, req_transport))
+ html1 = render(view1)
+ assert html1 =~ ~s(id="timing-section")
+ assert html1 =~ ~s(title="Total: 30000 ms")
+
+ # Credential error: timing section hidden entirely
+ {req_cred, _ch2, _snap2} =
+ create_channel_request(project, state: :error)
+
+ insert(:channel_error_event,
+ channel_request: req_cred,
+ error_message: "credential_missing_auth_fields"
+ )
+
+ {:ok, view2, _html} = live(conn, detail_path(project, req_cred))
+ html2 = render(view2)
+ refute html2 =~ "timing-section"
+ end
+ end
+
+ describe "detail page — context section" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "renders snapshot data and config changed indicator when versions differ",
+ %{conn: conn, project: project, user: user} do
+ channel = insert(:channel, project: project)
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ state: :success,
+ started_at: DateTime.utc_now()
+ )
+
+ insert(:channel_event, channel_request: request)
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # Snapshot data renders
+ assert html =~ snapshot.destination_url
+ assert html =~ to_string(snapshot.lock_version)
+
+ # Bump channel version to create mismatch
+ {:ok, _updated} =
+ Channels.update_channel(channel, %{name: "updated-name"}, actor: user)
+
+ {:ok, view2, _html} = live(conn, detail_path(project, request))
+ html2 = render(view2)
+
+ assert html2 =~ "changed" or html2 =~ "Config"
+ end
+ end
+
+ describe "detail page — nil body" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "shows 'Body not captured' when body_preview is nil", %{
+ conn: conn,
+ project: project
+ } do
+ {request, _channel, _snapshot} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: request,
+ request_body_preview: nil,
+ request_body_hash: nil,
+ request_body_size: 2048
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ assert html =~ "Body not captured"
+ end
+
+ test "hides body sub-section entirely when both preview and size are nil",
+ %{conn: conn, project: project} do
+ {request, _channel, _snapshot} =
+ create_channel_request(project, state: :error)
+
+ insert(:channel_error_event,
+ channel_request: request,
+ error_message: "credential_missing_auth_fields",
+ request_body_preview: nil,
+ request_body_hash: nil,
+ request_body_size: nil
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ refute html =~ "Body not captured"
+ refute html =~ "request-body"
+ end
+
+ test "shows metadata only for binary (non-text) content-type", %{
+ conn: conn,
+ project: project
+ } do
+ {request, _channel, _snapshot} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: request,
+ response_headers: [["content-type", "application/octet-stream"]],
+ response_body_preview: nil,
+ response_body_size: 4096,
+ response_body_hash: "binaryhash123"
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # Should show size and hash metadata
+ assert html =~ "4096" or html =~ "4.0 KB" or html =~ "4 KB"
+ assert html =~ "binaryhash123"
+ # Should NOT render a body preview block
+ refute html =~ ~s({"status":"ok"})
+ end
+ end
+
+ describe "security" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "cross-project isolation and invalid UUID both return 404", %{
+ conn: conn,
+ project: project
+ } do
+ other_project = insert(:project)
+ {request, _channel, _snapshot} = create_channel_request(other_project)
+ insert(:channel_event, channel_request: request)
+
+ assert {:error, {:redirect, _}} =
+ live(conn, detail_path(project, request))
+
+ assert {:error, {:redirect, _}} =
+ live(
+ conn,
+ ~p"/projects/#{project.id}/history/channels/not-a-uuid"
+ )
+ end
+ end
+
+ describe "detail page — auth attribution" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "renders client auth method name and destination credential name when both present",
+ %{conn: conn, project: project, user: user} do
+ webhook_auth_method =
+ insert(:webhook_auth_method,
+ project: project,
+ auth_type: :api,
+ name: "Prod API key"
+ )
+
+ credential =
+ insert(:credential, user: user, name: "Destination API", schema: "http")
+
+ project_credential =
+ insert(:project_credential, project: project, credential: credential)
+
+ channel = insert(:channel, project: project)
+
+ {request, _channel, _snap} =
+ create_channel_request(project,
+ channel: channel,
+ client_auth_type: "api"
+ )
+
+ request
+ |> Ecto.Changeset.change(%{
+ client_webhook_auth_method_id: webhook_auth_method.id,
+ destination_credential_id: project_credential.id
+ })
+ |> Lightning.Repo.update!()
+
+ insert(:channel_event, channel_request: request)
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # Client auth: method name and auth type label
+ assert html =~ "Prod API key"
+ assert html =~ "API key"
+
+ # Destination auth: credential name
+ assert html =~ "Destination API"
+ end
+
+ test "renders 'None' when no client auth configured and credential name when destination set",
+ %{conn: conn, project: project, user: user} do
+ credential =
+ insert(:credential, user: user, name: "Only Destination", schema: "http")
+
+ project_credential =
+ insert(:project_credential, project: project, credential: credential)
+
+ {request, _channel, _snap} =
+ create_channel_request(project, client_auth_type: nil)
+
+ request
+ |> Ecto.Changeset.change(%{
+ client_webhook_auth_method_id: nil,
+ destination_credential_id: project_credential.id
+ })
+ |> Lightning.Repo.update!()
+
+ insert(:channel_event, channel_request: request)
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # Client auth column shows "None"
+ assert html =~ "None"
+ # Destination credential name still renders
+ assert html =~ "Only Destination"
+ end
+
+    test "survives hard-deletion of the destination credential (FK nilifies the id)",
+         %{conn: conn, project: project, user: user} do
+      credential =
+        insert(:credential, user: user, name: "Will Be Gone", schema: "http")
+
+      project_credential =
+        insert(:project_credential, project: project, credential: credential)
+
+      {request, _channel, _snap} = create_channel_request(project)
+
+      request
+      |> Ecto.Changeset.change(%{
+        destination_credential_id: project_credential.id
+      })
+      |> Lightning.Repo.update!()
+
+      insert(:channel_event, channel_request: request)
+
+      # Delete the credential out from under the request. The FK is
+      # `on_delete: :nilify_all`, so the DB clears the request's
+      # `destination_credential_id` — the stale "id set, preload nil"
+      # shape can't be produced through the DB; the next test covers
+      # that path against the helper directly. Here we prove the
+      # detail page still renders after the credential disappears
+      # and the helper resolves the nilified id to "None".
+      Lightning.Repo.delete!(project_credential)
+
+      {:ok, view, _html} = live(conn, detail_path(project, request))
+      html = render(view)
+
+      # After nilify the helper returns "None"; "(deleted)" is
+      # unreachable through the DB, so assert the reachable outcome.
+      assert html =~ "None"
+
+      reloaded =
+        Lightning.Channels.get_channel_request_for_project(
+          project.id,
+          request.id
+        )
+
+      assert reloaded.destination_credential_id == nil
+    end
+
+ test "helper renders '(deleted)' for stale in-memory record with id but nil association" do
+ # Directly test the helper to cover the belt-and-suspenders path
+ # (id present, association nil) that the schema's on_delete: :nilify_all
+ # prevents us from producing through the DB.
+ stale = %Lightning.Channels.ChannelRequest{
+ client_webhook_auth_method_id: Ecto.UUID.generate(),
+ client_auth_type: "api",
+ client_webhook_auth_method: nil,
+ destination_credential_id: Ecto.UUID.generate(),
+ destination_credential: nil
+ }
+
+ assert LightningWeb.ChannelRequestLive.Helpers.format_client_auth(stale) =~
+ "(deleted)"
+
+ assert LightningWeb.ChannelRequestLive.Helpers.format_destination_auth(
+ stale
+ ) == "(deleted)"
+ end
+ end
+
+ describe "navigation" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "breadcrumbs render correctly", %{conn: conn, project: project} do
+ {request, _channel, _snapshot} = create_channel_request(project)
+ insert(:channel_event, channel_request: request)
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ assert html =~ "History"
+ assert html =~ "Channel"
+ assert html =~ String.slice(request.id, 0..7)
+ end
+
+ test "channel logs table rows link to the detail page", %{
+ conn: conn,
+ project: project
+ } do
+ channel = insert(:channel, project: project, name: "link-test")
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ state: :success,
+ started_at: DateTime.utc_now()
+ )
+
+ insert(:channel_event, channel_request: request)
+
+ {:ok, view, _html} =
+ live(conn, ~p"/projects/#{project.id}/history/channels")
+
+ html = render(view)
+
+ assert html =~
+ ~r/href="[^"]*\/projects\/#{project.id}\/history\/channels\/#{request.id}"/
+ end
+ end
+end
diff --git a/test/lightning_web/plugs/channel_proxy_plug_test.exs b/test/lightning_web/plugs/channel_proxy_plug_test.exs
index 0e48d90e469..dc7a9890263 100644
--- a/test/lightning_web/plugs/channel_proxy_plug_test.exs
+++ b/test/lightning_web/plugs/channel_proxy_plug_test.exs
@@ -368,6 +368,126 @@ defmodule LightningWeb.ChannelProxyPlugTest do
end
end
+ describe "non-UTF-8 body handling (issue #4541)" do
+ # `response_body_preview` and `request_body_preview` are stored as :text
+ # (UTF-8 only). When an upstream returns binary content (gzip, image, PDF,
+ # ...) the bytes can't be persisted as text. Rather than failing the whole
+ # insert, we drop the offending preview to nil and persist everything else
+ # — headers, hash, size, timing.
+
+ test "non-UTF-8 response body is dropped from preview; rest of event persisted",
+ %{bypass: bypass, channel: channel} do
+ # `0x8b` is the second byte of the gzip magic number — exactly the byte
+ # Postgres rejected in the dev reproduction. We send it raw so Finch
+ # cannot transparently decompress it.
+ raw_bytes = <<0x1F, 0x8B, 0x08, 0x00, 0xFF, 0xFE>>
+
+ Bypass.expect_once(bypass, "GET", "/binary", fn conn ->
+ conn
+ |> Plug.Conn.put_resp_header("content-type", "application/octet-stream")
+ |> Plug.Conn.send_resp(200, raw_bytes)
+ end)
+
+ resp =
+ conn(:get, "/channels/#{channel.id}/binary")
+ |> send_to_endpoint()
+
+ assert resp.status == 200
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ # The request itself succeeded — only the body preview is unstorable.
+ assert request.state == :success
+ assert request.completed_at != nil
+
+ event =
+ Lightning.Repo.one!(
+ from(e in ChannelEvent, where: e.channel_request_id == ^request.id)
+ )
+
+ assert event.type == :destination_response
+ assert event.request_method == "GET"
+ assert event.request_path == "/binary"
+ assert event.response_status == 200
+
+ # Body preview dropped because it isn't valid UTF-8.
+ assert event.response_body_preview == nil
+
+ # Hash and size are still recorded so the audit log can show that a body
+ # was returned, even though the bytes couldn't be persisted as text.
+ assert is_binary(event.response_body_hash)
+ assert event.response_body_size == byte_size(raw_bytes)
+
+ # Headers and timing persist as usual.
+ assert is_list(event.request_headers) and event.request_headers != []
+ assert is_list(event.response_headers) and event.response_headers != []
+ assert is_integer(event.latency_us) and event.latency_us > 0
+ end
+
+ test "non-UTF-8 request body is dropped from preview; rest of event persisted",
+ %{bypass: bypass, channel: channel} do
+ raw_bytes = <<0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A>>
+
+ Bypass.expect_once(bypass, "POST", "/upload", fn conn ->
+ Plug.Conn.send_resp(conn, 201, "ok")
+ end)
+
+ resp =
+ conn(:post, "/channels/#{channel.id}/upload", raw_bytes)
+ |> Plug.Conn.put_req_header("content-type", "application/octet-stream")
+ |> Plug.Conn.put_req_header("content-length", "#{byte_size(raw_bytes)}")
+ |> send_to_endpoint()
+
+ assert resp.status == 201
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.state == :success
+
+ event =
+ Lightning.Repo.one!(
+ from(e in ChannelEvent, where: e.channel_request_id == ^request.id)
+ )
+
+ assert event.request_method == "POST"
+ assert event.request_body_preview == nil
+ assert is_binary(event.request_body_hash)
+ assert event.request_body_size == byte_size(raw_bytes)
+ end
+
+ test "valid UTF-8 body is preserved unchanged",
+ %{bypass: bypass, channel: channel} do
+ Bypass.expect_once(bypass, "GET", "/json", fn conn ->
+ conn
+ |> Plug.Conn.put_resp_header("content-type", "application/json")
+ |> Plug.Conn.send_resp(200, ~s({"hello":"world"}))
+ end)
+
+ resp =
+ conn(:get, "/channels/#{channel.id}/json")
+ |> send_to_endpoint()
+
+ assert resp.status == 200
+
+ event =
+ Lightning.Repo.one!(
+ from(e in ChannelEvent,
+ join: r in ChannelRequest,
+ on: r.id == e.channel_request_id,
+ where: r.channel_id == ^channel.id
+ )
+ )
+
+ assert event.response_body_preview == ~s({"hello":"world"})
+ end
+ end
+
describe "handler persistence" do
test "creates ChannelRequest and ChannelEvent on successful proxy", %{
conn: conn,
@@ -396,7 +516,7 @@ defmodule LightningWeb.ChannelProxyPlugTest do
assert event.type == :destination_response
assert event.response_status == 200
- assert event.latency_ms != nil
+ assert event.latency_us != nil
assert event.request_method == "GET"
assert event.request_path == "/persisted"
end
@@ -866,10 +986,9 @@ defmodule LightningWeb.ChannelProxyPlugTest do
)
# The handler redacts authorization headers before persisting
- headers = Jason.decode!(event.request_headers)
-
+ # Headers are native jsonb arrays, no JSON decoding needed
auth_header =
- Enum.find(headers, fn [k, _v] -> k == "authorization" end)
+ Enum.find(event.request_headers, fn [k, _v] -> k == "authorization" end)
assert auth_header == ["authorization", "[REDACTED]"]
end
@@ -985,6 +1104,269 @@ defmodule LightningWeb.ChannelProxyPlugTest do
end
end
+ # ---------------------------------------------------------------
+ # Phase 1a contract tests — query string + client auth tracking
+ # ---------------------------------------------------------------
+ #
+ # These tests define the target interface after:
+ # - D1: request_query_string on channel_events
+ # - D3: client_webhook_auth_method_id and client_auth_type on channel_requests
+ # - D4: Proxy plug passes query string and auth info into handler state
+ #
+ # They will not compile/pass until Phase 1b implements the changes.
+
+ describe "query string persistence" do
+ test "persists query string on channel event", %{
+ bypass: bypass,
+ channel: channel
+ } do
+ Bypass.expect_once(bypass, "GET", "/search", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "results")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/search?q=foo&page=2")
+ |> send_to_endpoint()
+
+ event =
+ Lightning.Repo.one!(
+ from(e in ChannelEvent,
+ join: r in ChannelRequest,
+ on: r.id == e.channel_request_id,
+ where: r.channel_id == ^channel.id
+ )
+ )
+
+ assert event.request_query_string == "q=foo&page=2"
+ end
+
+ test "empty query string when no params", %{
+ bypass: bypass,
+ channel: channel
+ } do
+ Bypass.expect_once(bypass, "GET", "/plain", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/plain")
+ |> send_to_endpoint()
+
+ event =
+ Lightning.Repo.one!(
+ from(e in ChannelEvent,
+ join: r in ChannelRequest,
+ on: r.id == e.channel_request_id,
+ where: r.channel_id == ^channel.id
+ )
+ )
+
+ assert event.request_query_string == ""
+ end
+ end
+
+ describe "client auth tracking" do
+ test "persists auth method ID and type for API key auth", %{bypass: bypass} do
+ channel =
+ create_client_auth_channel(bypass, [
+ %{auth_type: :api, api_key: "track-me"}
+ ])
+
+ auth_method =
+ channel
+ |> Lightning.Repo.preload(client_webhook_auth_methods: [])
+ |> Map.get(:client_webhook_auth_methods)
+ |> hd()
+
+ Bypass.expect_once(bypass, "GET", "/tracked", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/tracked")
+ |> put_req_header("x-api-key", "track-me")
+ |> send_to_endpoint()
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.client_webhook_auth_method_id == auth_method.id
+ assert request.client_auth_type == "api"
+ end
+
+ test "persists auth method ID and type for Basic auth", %{bypass: bypass} do
+ channel =
+ create_client_auth_channel(bypass, [
+ %{auth_type: :basic, username: "user", password: "pass"}
+ ])
+
+ auth_method =
+ channel
+ |> Lightning.Repo.preload(client_webhook_auth_methods: [])
+ |> Map.get(:client_webhook_auth_methods)
+ |> hd()
+
+ Bypass.expect_once(bypass, "GET", "/tracked", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ encoded = Base.encode64("user:pass")
+
+ conn(:get, "/channels/#{channel.id}/tracked")
+ |> put_req_header("authorization", "Basic #{encoded}")
+ |> send_to_endpoint()
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.client_webhook_auth_method_id == auth_method.id
+ assert request.client_auth_type == "basic"
+ end
+
+ test "nil auth method when no client auth configured", %{
+ bypass: bypass,
+ channel: channel
+ } do
+ Bypass.expect_once(bypass, "GET", "/open", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/open")
+ |> send_to_endpoint()
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.client_webhook_auth_method_id == nil
+ assert request.client_auth_type == nil
+ end
+ end
+
+ describe "destination auth tracking" do
+ test "persists destination_credential_id on successful proxy with destination auth",
+ %{bypass: bypass} do
+ channel =
+ create_destination_auth_channel(bypass, "http", %{
+ "access_token" => "tok-123"
+ })
+
+ project_credential_id =
+ channel
+ |> Lightning.Repo.preload(destination_auth_method: :project_credential)
+ |> get_in([
+ Access.key(:destination_auth_method),
+ Access.key(:project_credential_id)
+ ])
+
+ Bypass.expect_once(bypass, "GET", "/dest-track", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/dest-track")
+ |> send_to_endpoint()
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.destination_credential_id == project_credential_id
+ refute is_nil(project_credential_id)
+ end
+
+ test "persists destination_credential_id even when credential resolution fails",
+ %{bypass: _bypass} do
+ # Channel with a destination auth method but credential missing auth
+ # fields — destination auth resolution fails, but we still know which
+ # credential was configured.
+ project = insert(:project)
+ user = insert(:user)
+
+ credential =
+ insert(:credential, schema: "http", name: "bad-cred", user: user)
+ |> with_body(%{body: %{"baseUrl" => "https://example.com"}})
+
+ project_credential =
+ insert(:project_credential, project: project, credential: credential)
+
+ channel =
+ insert(:channel,
+ project: project,
+ destination_url: "http://localhost:9999",
+ enabled: true,
+ channel_auth_methods: [
+ build(:channel_auth_method,
+ role: :destination,
+ webhook_auth_method: nil,
+ project_credential: project_credential
+ )
+ ]
+ )
+
+ resp =
+ conn(:get, "/channels/#{channel.id}/test")
+ |> send_to_endpoint()
+
+ assert resp.status == 502
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.destination_credential_id == project_credential.id
+ assert request.state == :error
+ end
+
+ test "destination_credential_id is nil when no destination auth configured",
+ %{bypass: bypass, channel: channel} do
+ Bypass.expect_once(bypass, "GET", "/no-dest-auth", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/no-dest-auth")
+ |> send_to_endpoint()
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.destination_credential_id == nil
+ end
+ end
+
+ describe "collect_timing integration" do
+ test "persists per-direction timing after successful proxy", %{
+ bypass: bypass,
+ channel: channel
+ } do
+ Bypass.expect_once(bypass, "GET", "/timed", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/timed")
+ |> send_to_endpoint()
+
+ event =
+ Lightning.Repo.one!(
+ from(e in ChannelEvent,
+ join: r in ChannelRequest,
+ on: r.id == e.channel_request_id,
+ where: r.channel_id == ^channel.id
+ )
+ )
+
+ # With collect_timing: true, Philter populates timing.send_us
+ # which the handler persists as request_send_us
+ assert is_integer(event.request_send_us)
+ assert event.request_send_us >= 0
+ end
+ end
+
defp send_to_endpoint(conn) do
LightningWeb.Endpoint.call(conn, LightningWeb.Endpoint.init([]))
end
diff --git a/test/support/factories.ex b/test/support/factories.ex
index ffa99da33d0..94c2a6de221 100644
--- a/test/support/factories.ex
+++ b/test/support/factories.ex
@@ -1,5 +1,7 @@
defmodule Lightning.Factories do
use ExMachina.Ecto, repo: Lightning.Repo
+ use Lightning.Factories.ChannelFactories
+
alias Lightning.Workflows.Snapshot
def webhook_auth_method_factory do
@@ -858,47 +860,4 @@ defmodule Lightning.Factories do
def sandbox_for(parent, attrs \\ %{}) do
build(:project, Map.merge(%{parent: parent}, attrs))
end
-
- def channel_factory do
- %Lightning.Channels.Channel{
- project: build(:project),
- name: sequence(:channel_name, &"channel-#{&1}"),
- destination_url:
- sequence(
- :channel_destination_url,
- &"https://example.com/destination/#{&1}"
- ),
- enabled: true
- }
- end
-
- def channel_auth_method_factory do
- %Lightning.Channels.ChannelAuthMethod{
- role: :client,
- webhook_auth_method: build(:webhook_auth_method)
- }
- end
-
- def channel_snapshot_factory do
- %Lightning.Channels.ChannelSnapshot{
- lock_version: 1,
- name: sequence(:channel_snapshot_name, &"channel-#{&1}"),
- destination_url: "https://example.com/destination",
- enabled: true
- }
- end
-
- def channel_request_factory do
- %Lightning.Channels.ChannelRequest{
- request_id: sequence(:channel_request_id, &"req-#{&1}"),
- state: :pending,
- started_at: DateTime.utc_now()
- }
- end
-
- def channel_event_factory do
- %Lightning.Channels.ChannelEvent{
- type: :destination_response
- }
- end
end
diff --git a/test/support/factories/channel_factories.ex b/test/support/factories/channel_factories.ex
new file mode 100644
index 00000000000..a4a88540a72
--- /dev/null
+++ b/test/support/factories/channel_factories.ex
@@ -0,0 +1,78 @@
+defmodule Lightning.Factories.ChannelFactories do
+ @moduledoc false
+
+ defmacro __using__(_opts) do
+ quote do
+ def channel_factory do
+ %Lightning.Channels.Channel{
+ project: build(:project),
+ name: sequence(:channel_name, &"channel-#{&1}"),
+ destination_url:
+ sequence(
+ :channel_destination_url,
+ &"https://example.com/destination/#{&1}"
+ ),
+ enabled: true
+ }
+ end
+
+ def channel_auth_method_factory do
+ %Lightning.Channels.ChannelAuthMethod{
+ role: :client,
+ webhook_auth_method: build(:webhook_auth_method)
+ }
+ end
+
+ def channel_snapshot_factory do
+ %Lightning.Channels.ChannelSnapshot{
+ lock_version: 1,
+ name: sequence(:channel_snapshot_name, &"channel-#{&1}"),
+ destination_url: "https://example.com/destination",
+ enabled: true
+ }
+ end
+
+ def channel_request_factory do
+ %Lightning.Channels.ChannelRequest{
+ request_id: sequence(:channel_request_id, &"req-#{&1}"),
+ client_identity: "127.0.0.1",
+ state: :pending,
+ started_at: DateTime.utc_now()
+ }
+ end
+
+      def channel_event_factory do
+        %Lightning.Channels.ChannelEvent{
+          type: :destination_response,
+          request_method: "POST",
+          request_path: "/api/v1/data",
+          request_headers: [
+            ["content-type", "application/json"],
+            ["authorization", "[REDACTED]"]
+          ],
+          request_body_preview: ~s({"key":"value"}),
+          request_body_hash: "abc123def456",
+          request_body_size: 15,
+          response_status: 200,
+          response_headers: [["content-type", "application/json"]],
+          response_body_preview: ~s({"status":"ok"}),
+          response_body_hash: "def456abc123",
+          response_body_size: 15,
+          latency_us: 350_000,
+          ttfb_us: 280_000,
+          request_send_us: 5_000,
+          response_duration_us: 65_000
+        }
+      end
+
+ def channel_error_event_factory do
+ %Lightning.Channels.ChannelEvent{
+ type: :error,
+ request_method: "POST",
+ request_path: "/api/v1/data",
+ request_headers: [["content-type", "application/json"]]
+ }
+ end
+ end
+ end
+end