|
14 | 14 | from langchain_core.outputs import LLMResult, ChatGeneration |
15 | 15 | from langchain_core.agents import AgentFinish, AgentAction |
16 | 16 | from langchain_core.prompt_values import PromptValue, ChatPromptValue |
17 | | -from langchain_core.messages import BaseMessage, AIMessageChunk, AIMessage |
| 17 | +from langchain_core.messages import BaseMessage, AIMessageChunk, AIMessage, ToolMessage |
18 | 18 | from langchain_core.prompts import AIMessagePromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate |
19 | 19 | from langchain_core.outputs import ChatGenerationChunk, GenerationChunk |
20 | 20 |
|
@@ -581,6 +581,15 @@ def _convert_inputs(inputs: Any) -> Any: |
581 | 581 | if inputs.content != '': |
582 | 582 | format_inputs['content'] = inputs.content |
583 | 583 | return format_inputs |
| 584 | + if isinstance(inputs, ToolMessage): |
| 585 | + """ |
| 586 | + Must be before BaseMessage. |
| 587 | + """ |
| 588 | + content = {"content": inputs.content} |
| 589 | + if inputs.artifact is not None: |
| 590 | + content['artifact'] = _convert_inputs(inputs.artifact) # artifact exists only when response_format="content_and_artifact". |
| 591 | + message = Message(role=inputs.type, content=content) |
| 592 | + return message |
584 | 593 | if isinstance(inputs, BaseMessage): |
585 | 594 | message = Message(role=inputs.type, content=inputs.content, |
586 | 595 | tool_calls=inputs.additional_kwargs.get('tool_calls', [])) |
|
0 commit comments