Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .github/workflows/elixir.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,5 +70,8 @@ jobs:
- run: mix compile --warnings-as-errors
if: ${{ matrix.lint }}

- run: mix dialyzer
if: ${{ matrix.lint }}

- name: Run tests
run: mix test
6 changes: 3 additions & 3 deletions lib/chains/llm_chain.ex
Original file line number Diff line number Diff line change
Expand Up @@ -541,11 +541,11 @@ defmodule LangChain.Chains.LLMChain do

defp initial_run_logging(%LLMChain{verbose: true} = chain) do
# set the callback function on the chain
if chain.verbose, do: IO.inspect(chain.llm, label: "LLM")
IO.inspect(chain.llm, label: "LLM")

if chain.verbose, do: IO.inspect(chain.messages, label: "MESSAGES")
IO.inspect(chain.messages, label: "MESSAGES")

if chain.verbose, do: IO.inspect(chain.tools, label: "TOOLS")
IO.inspect(chain.tools, label: "TOOLS")

:ok
end
Expand Down
9 changes: 2 additions & 7 deletions lib/chat_models/chat_anthropic.ex
Original file line number Diff line number Diff line change
Expand Up @@ -648,7 +648,7 @@ defmodule LangChain.ChatModels.ChatAnthropic do
|> Req.post(
into:
Utils.handle_stream_fn(
anthropic,
Map.take(anthropic, [:stream]),
&decode_stream(anthropic, &1),
&do_process_response(anthropic, &1)
)
Expand All @@ -670,11 +670,6 @@ defmodule LangChain.ChatModels.ChatAnthropic do

data

# The error tuple was successfully received from the API. Unwrap it and
# return it as an error.
{:ok, {:error, %LangChainError{} = error}} ->
{:error, error}

{:error, %Req.TransportError{reason: :timeout} = err} ->
{:error,
LangChainError.exception(type: "timeout", message: "Request timed out", original: err)}
Expand Down Expand Up @@ -1212,7 +1207,7 @@ defmodule LangChain.ChatModels.ChatAnthropic do
@doc """
Convert a LangChain structure to the expected map of data for the Anthropic API.
"""
@spec for_api(Message.t() | ContentPart.t() | Function.t()) ::
@spec for_api(ToolCall.t() | ToolResult.t()) ::
%{String.t() => any()} | no_return()
# def for_api(%Message{role: :assistant, tool_calls: calls} = msg)
# when is_list(calls) and calls != [] do
Expand Down
10 changes: 3 additions & 7 deletions lib/chat_models/chat_deepseek.ex
Original file line number Diff line number Diff line change
Expand Up @@ -698,7 +698,7 @@ defmodule LangChain.ChatModels.ChatDeepSeek do
|> Req.post(
into:
Utils.handle_stream_fn(
deepseek,
Map.take(deepseek, [:stream]),
&decode_stream/1,
&do_process_response(deepseek, &1)
)
Expand Down Expand Up @@ -1057,12 +1057,8 @@ defmodule LangChain.ChatModels.ChatDeepSeek do
defp merge_response_metadata(message, response_data) do
response_metadata = extract_response_metadata(response_data)

if map_size(response_metadata) > 0 do
current_metadata = message.metadata || %{}
%{message | metadata: Map.merge(current_metadata, response_metadata)}
else
message
end
current_metadata = message.metadata || %{}
%{message | metadata: Map.merge(current_metadata, response_metadata)}
end

defp finish_reason_to_status(nil), do: :incomplete
Expand Down
4 changes: 2 additions & 2 deletions lib/chat_models/chat_google_ai.ex
Original file line number Diff line number Diff line change
Expand Up @@ -577,7 +577,7 @@ defmodule LangChain.ChatModels.ChatGoogleAI do
|> Req.post(
into:
Utils.handle_stream_fn(
google_ai,
Map.take(google_ai, [:stream]),
&ChatOpenAI.decode_stream/1,
&do_process_response(google_ai, &1, MessageDelta)
)
Expand Down Expand Up @@ -840,7 +840,7 @@ defmodule LangChain.ChatModels.ChatGoogleAI do
@doc """
Return the content parts for the message.
"""
@spec get_message_contents(MessageDelta.t() | Message.t()) :: [%{String.t() => any()}]
@spec get_message_contents(MessageDelta.t() | Message.t()) :: [%{String.t() => any()}] | nil
def get_message_contents(%{content: content} = _message) when is_binary(content) do
[%{"text" => content}]
end
Expand Down
6 changes: 3 additions & 3 deletions lib/chat_models/chat_mistral_ai.ex
Original file line number Diff line number Diff line change
Expand Up @@ -461,7 +461,7 @@ defmodule LangChain.ChatModels.ChatMistralAI do
|> Req.post(
into:
Utils.handle_stream_fn(
mistralai,
Map.take(mistralai, [:stream]),
# Mistral's streaming API is mostly compatible with OpenAI's,
# so we can reuse the same decoder
&ChatOpenAI.decode_stream/1,
Expand Down Expand Up @@ -491,15 +491,15 @@ defmodule LangChain.ChatModels.ChatMistralAI do
# Parse final or partial responses to produce the appropriate LangChain structure.
@doc false
@spec do_process_response(
%{:callbacks => [map()]},
t(),
data :: %{String.t() => any()} | {:error, any()}
) ::
:skip
| Message.t()
| [Message.t()]
| MessageDelta.t()
| [MessageDelta.t()]
| {:error, String.t()}
| {:error, LangChainError.t()}
# The last chunk of the response contains both the final delta in the "choices" key,
# and the token usage in the "usage" key
def do_process_response(model, %{"choices" => choices, "usage" => %{} = _usage} = data) do
Expand Down
2 changes: 1 addition & 1 deletion lib/chat_models/chat_ollama_ai.ex
Original file line number Diff line number Diff line change
Expand Up @@ -524,7 +524,7 @@ defmodule LangChain.ChatModels.ChatOllamaAI do
|> Req.post(
into:
Utils.handle_stream_fn(
ollama_ai,
Map.take(ollama_ai, [:stream]),
&ChatOpenAI.decode_stream/1,
&do_process_response(ollama_ai, &1)
)
Expand Down
6 changes: 3 additions & 3 deletions lib/chat_models/chat_open_ai.ex
Original file line number Diff line number Diff line change
Expand Up @@ -875,7 +875,7 @@ defmodule LangChain.ChatModels.ChatOpenAI do
|> Req.post(
into:
Utils.handle_stream_fn(
openai,
Map.take(openai, [:stream]),
&decode_stream/1,
&do_process_response(openai, &1)
)
Expand Down Expand Up @@ -978,15 +978,15 @@ defmodule LangChain.ChatModels.ChatOpenAI do
# Parse a new message response
@doc false
@spec do_process_response(
%{:callbacks => [map()]},
t(),
data :: %{String.t() => any()} | {:error, any()}
) ::
:skip
| Message.t()
| [Message.t()]
| MessageDelta.t()
| [MessageDelta.t()]
| {:error, String.t()}
| {:error, LangChainError.t()}
def do_process_response(model, %{"choices" => _choices} = data) do
token_usage = get_token_usage(data)

Expand Down
11 changes: 8 additions & 3 deletions lib/chat_models/chat_open_ai_responses.ex
Original file line number Diff line number Diff line change
Expand Up @@ -773,7 +773,12 @@ defmodule LangChain.ChatModels.ChatOpenAIResponses do
|> maybe_add_org_id_header()
|> maybe_add_proj_id_header()
|> Req.post(
into: Utils.handle_stream_fn(openai, &decode_stream/1, &do_process_response(openai, &1))
into:
Utils.handle_stream_fn(
Map.take(openai, [:stream]),
&decode_stream/1,
&do_process_response(openai, &1)
)
)
|> case do
{:ok, %Req.Response{body: data} = response} ->
Expand Down Expand Up @@ -866,15 +871,15 @@ defmodule LangChain.ChatModels.ChatOpenAIResponses do
# Parse a new message response
@doc false
@spec do_process_response(
%{:callbacks => [map()]},
t(),
data :: %{String.t() => any()} | {:error, any()}
) ::
:skip
| Message.t()
| [Message.t()]
| MessageDelta.t()
| [MessageDelta.t()]
| {:error, String.t()}
| {:error, LangChainError.t()}

# Complete Response with output lists
def do_process_response(
Expand Down
6 changes: 3 additions & 3 deletions lib/chat_models/chat_orq.ex
Original file line number Diff line number Diff line change
Expand Up @@ -700,7 +700,7 @@ defmodule LangChain.ChatModels.ChatOrq do
|> Req.post(
into:
Utils.handle_stream_fn(
orq,
Map.take(orq, [:stream]),
&decode_stream/1,
&do_process_response(orq, &1)
)
Expand Down Expand Up @@ -744,15 +744,15 @@ defmodule LangChain.ChatModels.ChatOrq do
# Parse responses (compatible with OpenAI-like shapes used by orq)
@doc false
@spec do_process_response(
%{:callbacks => [map()]},
t(),
data :: %{String.t() => any()} | {:error, any()}
) ::
:skip
| Message.t()
| [Message.t()]
| MessageDelta.t()
| [MessageDelta.t()]
| {:error, String.t()}
| {:error, LangChainError.t()}
def do_process_response(model, %{"choices" => _choices} = data) do
token_usage = get_token_usage(data)

Expand Down
2 changes: 1 addition & 1 deletion lib/chat_models/chat_perplexity.ex
Original file line number Diff line number Diff line change
Expand Up @@ -427,7 +427,7 @@ defmodule LangChain.ChatModels.ChatPerplexity do
|> Req.post(
into:
Utils.handle_stream_fn(
perplexity,
Map.take(perplexity, [:stream]),
&decode_stream/1,
&process_stream_chunk(perplexity, &1)
)
Expand Down
4 changes: 2 additions & 2 deletions lib/chat_models/chat_vertex_ai.ex
Original file line number Diff line number Diff line change
Expand Up @@ -445,7 +445,7 @@ defmodule LangChain.ChatModels.ChatVertexAI do
|> Req.post(
into:
Utils.handle_stream_fn(
vertex_ai,
Map.take(vertex_ai, [:stream]),
&ChatOpenAI.decode_stream/1,
&do_process_response(vertex_ai, &1, MessageDelta)
)
Expand Down Expand Up @@ -703,7 +703,7 @@ defmodule LangChain.ChatModels.ChatVertexAI do
@doc """
Return the content parts for the message.
"""
@spec get_message_contents(MessageDelta.t() | Message.t()) :: [%{String.t() => any()}]
@spec get_message_contents(MessageDelta.t() | Message.t()) :: [%{String.t() => any()}] | nil
def get_message_contents(%{content: content} = _message) when is_binary(content) do
[%{"text" => content}]
end
Expand Down
2 changes: 1 addition & 1 deletion lib/message_delta.ex
Original file line number Diff line number Diff line change
Expand Up @@ -476,7 +476,7 @@ defmodule LangChain.MessageDelta do
%MessageDelta{metadata: %{usage: new_usage}} = _delta_part
)
when not is_nil(new_usage) do
current_usage = TokenUsage.get(primary)
current_usage = TokenUsage.get(Map.take(primary, [:metadata]))
combined_usage = TokenUsage.add(current_usage, new_usage)

%MessageDelta{primary | metadata: Map.put(primary.metadata || %{}, :usage, combined_usage)}
Expand Down
2 changes: 1 addition & 1 deletion lib/token_usage.ex
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ defmodule LangChain.TokenUsage do
Sets the token usage information on a `LangChain.Message` or
`LangChain.MessageDelta` struct when wrapped in an `{:ok, _}` or `{:error, _}` tuple, storing it in the `metadata` under the `:usage` key.
"""
@spec set_wrapped({:ok, %{metadata: nil | map()}}, nil | t()) ::
@spec set_wrapped({:ok, %{metadata: nil | map()}} | {:error, Ecto.Changeset.t()}, nil | t()) ::
{:ok, %{metadata: %{usage: t()}} | {:error, any()}}
def set_wrapped({:ok, message}, usage) do
{:ok, set(message, usage)}
Expand Down
14 changes: 2 additions & 12 deletions lib/tools/deep_research.ex
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@ defmodule LangChain.Tools.DeepResearch do
end

@spec poll_with_backoff(String.t(), integer(), integer(), integer()) ::
{:ok, map()} | {:error, String.t()}
{:ok, DeepResearchClient.research_result()} | {:error, String.t()}
defp poll_with_backoff(request_id, interval, max_interval, attempts) do
case DeepResearchClient.check_status(request_id) do
{:ok, %{status: "completed"}} ->
Expand Down Expand Up @@ -234,7 +234,7 @@ defmodule LangChain.Tools.DeepResearch do

# Formats the research result for LLM consumption, including the main findings
# and a summary of sources used.
@spec format_research_result(map()) :: String.t()
@spec format_research_result(DeepResearchClient.research_result()) :: String.t()
defp format_research_result(%{output_text: text, sources: sources}) when is_list(sources) do
source_summary = format_sources(sources)

Expand All @@ -249,14 +249,6 @@ defmodule LangChain.Tools.DeepResearch do
"""
end

defp format_research_result(%{output_text: text}) do
text
end

defp format_research_result(result) do
"Research completed. Result: #{inspect(result)}"
end

@spec format_sources(list()) :: String.t()
defp format_sources(sources) when is_list(sources) do
sources
Expand All @@ -268,6 +260,4 @@ defmodule LangChain.Tools.DeepResearch do
end)
|> Enum.join("\n")
end

defp format_sources(_), do: "No source information available."
end
13 changes: 12 additions & 1 deletion lib/tools/deep_research/research_request.ex
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,17 @@ defmodule LangChain.Tools.DeepResearch.ResearchRequest do
summary: String.t()
}

@type precast_t() :: %__MODULE__{
query: String.t() | nil,
model: String.t() | nil,
system_message: String.t() | nil,
max_tool_calls: integer() | nil,
background: boolean() | nil,
temperature: float() | nil,
max_output_tokens: integer() | nil,
summary: String.t() | nil
}

@primary_key false
embedded_schema do
field :query, :string
Expand All @@ -34,7 +45,7 @@ defmodule LangChain.Tools.DeepResearch.ResearchRequest do
@doc """
Creates a changeset for a research request.
"""
@spec changeset(__MODULE__.t(), map()) :: Ecto.Changeset.t()
@spec changeset(__MODULE__.precast_t(), map()) :: Ecto.Changeset.t()
def changeset(request \\ %__MODULE__{}, attrs) do
request
|> cast(attrs, [
Expand Down
13 changes: 12 additions & 1 deletion lib/tools/deep_research/research_result.ex
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,17 @@ defmodule LangChain.Tools.DeepResearch.ResearchResult do
tool_calls: [any()]
}

@type precast_t() :: %__MODULE__{
id: String.t() | nil,
output_text: String.t() | nil,
model: String.t() | nil,
created_at: integer() | nil,
completion_time: integer() | nil,
sources: [any()],
usage: any() | nil,
tool_calls: [any()]
}

@primary_key false
embedded_schema do
field :id, :string
Expand Down Expand Up @@ -53,7 +64,7 @@ defmodule LangChain.Tools.DeepResearch.ResearchResult do
@doc """
Creates a changeset for research result.
"""
@spec changeset(__MODULE__.t(), map()) :: Ecto.Changeset.t()
@spec changeset(__MODULE__.precast_t(), map()) :: Ecto.Changeset.t()
def changeset(result \\ %__MODULE__{}, attrs) do
result
|> cast(attrs, [:id, :output_text, :model, :created_at, :completion_time])
Expand Down
11 changes: 10 additions & 1 deletion lib/tools/deep_research/research_status.ex
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,15 @@ defmodule LangChain.Tools.DeepResearch.ResearchStatus do
progress_info: map() | nil
}

@type precast_t() :: %__MODULE__{
id: String.t() | nil,
status: String.t() | nil,
created_at: integer() | nil,
error_message: String.t() | nil,
error_code: String.t() | nil,
progress_info: map() | nil
}

@primary_key false
embedded_schema do
field :id, :string
Expand All @@ -32,7 +41,7 @@ defmodule LangChain.Tools.DeepResearch.ResearchStatus do
@doc """
Creates a changeset for research status.
"""
@spec changeset(__MODULE__.t(), map()) :: Ecto.Changeset.t()
@spec changeset(__MODULE__.precast_t(), map()) :: Ecto.Changeset.t()
def changeset(status \\ %__MODULE__{}, attrs) do
status
|> cast(attrs, [:id, :status, :created_at, :error_message, :error_code, :progress_info])
Expand Down
Loading