Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
75 changes: 33 additions & 42 deletions examples/graphon_openai_slim/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -255,48 +255,42 @@ def build_graph(
) -> Graph:
start_node = StartNode(
node_id="start",
config={
"id": "start",
"data": StartNodeData(
title="Start",
variables=[
VariableEntity(
variable="query",
label="Query",
type=VariableEntityType.PARAGRAPH,
required=True,
),
],
),
},
config=StartNodeData(
title="Start",
variables=[
VariableEntity(
variable="query",
label="Query",
type=VariableEntityType.PARAGRAPH,
required=True,
),
],
),
graph_init_params=graph_init_params,
graph_runtime_state=graph_runtime_state,
)

llm_node = LLMNode(
node_id="llm",
config={
"id": "llm",
"data": LLMNodeData(
title="LLM",
model=ModelConfig(
provider=provider,
name="gpt-5.4",
mode=LLMMode.CHAT,
),
prompt_template=[
LLMNodeChatModelMessage(
role=PromptMessageRole.SYSTEM,
text="You are a concise assistant.",
),
LLMNodeChatModelMessage(
role=PromptMessageRole.USER,
text="{{#start.query#}}",
),
],
context=ContextConfig(enabled=False),
config=LLMNodeData(
title="LLM",
model=ModelConfig(
provider=provider,
name="gpt-5.4",
mode=LLMMode.CHAT,
),
},
prompt_template=[
LLMNodeChatModelMessage(
role=PromptMessageRole.SYSTEM,
text="You are a concise assistant.",
),
LLMNodeChatModelMessage(
role=PromptMessageRole.USER,
text="{{#start.query#}}",
),
],
context=ContextConfig(enabled=False),
),
graph_init_params=graph_init_params,
graph_runtime_state=graph_runtime_state,
model_instance=prepared_llm,
Expand All @@ -306,13 +300,10 @@ def build_graph(

output_node = AnswerNode(
node_id="output",
config={
"id": "output",
"data": AnswerNodeData(
title="Output",
answer="{{#llm.text#}}",
),
},
config=AnswerNodeData(
title="Output",
answer="{{#llm.text#}}",
),
graph_init_params=graph_init_params,
graph_runtime_state=graph_runtime_state,
)
Expand Down
36 changes: 25 additions & 11 deletions src/graphon/nodes/base/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,23 @@ def get_node_type_classes_mapping(
class _NodeDataModelMixin[NodeDataT: BaseNodeData]:
"""Typed node-data hydration helpers."""

@classmethod
def node_data_from_mapping(
    cls: type[Node[NodeDataT]],
    node_data: Mapping[str, Any],
) -> NodeDataT:
    """Hydrate this node class's typed node-data model from a plain mapping.

    Callers that naturally start from raw dictionaries can use this helper
    instead of validating by hand, while `Node.__init__` keeps its stricter
    `config=NodeDataT` contract.

    Returns:
        The validated node-data instance for this concrete node subclass.

    """
    validated: NodeDataT = cls.validate_node_data(node_data)
    return validated

@classmethod
def validate_node_data(
cls: type[Node[NodeDataT]],
Expand Down Expand Up @@ -523,10 +540,15 @@ def _extract_node_data_type_from_generic(cls) -> type[BaseNodeData] | None:
def __init__(
self,
node_id: str,
config: NodeConfigDict,
config: NodeDataT,
*,
graph_init_params: GraphInitParams,
graph_runtime_state: GraphRuntimeState,
) -> None:
if not node_id:
msg = "node_id is required"
raise ValueError(msg)

self._graph_init_params = graph_init_params
self._run_context = MappingProxyType(dict(graph_init_params.run_context))
self.id = node_id
Expand All @@ -536,19 +558,11 @@ def __init__(
self.graph_runtime_state = graph_runtime_state
self.state: NodeState = NodeState.UNKNOWN # node execution state

config_node_id = config["id"]
if node_id != config_node_id:
msg = (
"node_id must match config['id'], "
f"got node_id={node_id!r}, config['id']={config_node_id!r}"
)
raise ValueError(msg)

self._node_id = config_node_id
self._node_id = node_id
self._node_execution_id: str = ""
self._start_at = datetime.now(UTC).replace(tzinfo=None)

self._node_data = self.validate_node_data(config["data"])
self._node_data = self.validate_node_data(config)

self.post_init()

Expand Down
5 changes: 2 additions & 3 deletions src/graphon/nodes/code/code_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
from textwrap import dedent
from typing import TYPE_CHECKING, Any, Protocol, cast, override

from graphon.entities.graph_config import NodeConfigDict
from graphon.enums import BuiltinNodeTypes, WorkflowNodeExecutionStatus
from graphon.node_events.base import NodeRunResult
from graphon.nodes.base.node import Node
Expand Down Expand Up @@ -83,10 +82,10 @@ class CodeNode(Node[CodeNodeData]):
def __init__(
self,
node_id: str,
config: NodeConfigDict,
config: CodeNodeData,
*,
graph_init_params: "GraphInitParams",
graph_runtime_state: "GraphRuntimeState",
*,
code_executor: WorkflowCodeExecutor,
code_limits: CodeNodeLimits,
) -> None:
Expand Down
5 changes: 2 additions & 3 deletions src/graphon/nodes/document_extractor/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
from docx.table import Table
from docx.text.paragraph import Paragraph

from graphon.entities.graph_config import NodeConfigDict
from graphon.enums import BuiltinNodeTypes, WorkflowNodeExecutionStatus
from graphon.file import file_manager
from graphon.file.enums import FileTransferMethod
Expand Down Expand Up @@ -193,10 +192,10 @@ def version(cls) -> str:
def __init__(
self,
node_id: str,
config: NodeConfigDict,
config: DocumentExtractorNodeData,
*,
graph_init_params: "GraphInitParams",
graph_runtime_state: "GraphRuntimeState",
*,
unstructured_api_config: UnstructuredApiConfig | None = None,
http_client: HttpClientProtocol | None = None,
) -> None:
Expand Down
5 changes: 2 additions & 3 deletions src/graphon/nodes/http_request/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
from collections.abc import Callable, Mapping, Sequence
from typing import TYPE_CHECKING, Any, override

from graphon.entities.graph_config import NodeConfigDict
from graphon.enums import BuiltinNodeTypes, WorkflowNodeExecutionStatus
from graphon.file.enums import FileTransferMethod
from graphon.file.models import File
Expand Down Expand Up @@ -45,10 +44,10 @@ class HttpRequestNode(Node[HttpRequestNodeData]):
def __init__(
self,
node_id: str,
config: NodeConfigDict,
config: HttpRequestNodeData,
*,
graph_init_params: "GraphInitParams",
graph_runtime_state: "GraphRuntimeState",
*,
http_request_config: HttpRequestNodeConfig,
http_client: HttpClientProtocol | None = None,
tool_file_manager_factory: Callable[[], ToolFileManagerProtocol],
Expand Down
19 changes: 9 additions & 10 deletions src/graphon/nodes/human_input/human_input_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from datetime import UTC, datetime
from typing import TYPE_CHECKING, Any, override

from graphon.entities.graph_config import NodeConfigDict
from graphon.entities.pause_reason import HumanInputRequired
from graphon.enums import (
BuiltinNodeTypes,
Expand Down Expand Up @@ -64,10 +63,14 @@ class HumanInputNode(Node[HumanInputNodeData]):
def __init__(
self,
node_id: str,
config: NodeConfigDict,
config: HumanInputNodeData,
*,
graph_init_params: "GraphInitParams",
graph_runtime_state: "GraphRuntimeState",
runtime: HumanInputNodeRuntimeProtocol | None = None,
# TODO @-LAN: See https://github.com/langgenius/graphon/issues/new/choose. # noqa: FIX002
# Make `runtime` optional once Graphon provides a default human-input
# runtime adapter instead of requiring an embedding-specific implementation.
runtime: HumanInputNodeRuntimeProtocol,
form_repository: object | None = None,
) -> None:
super().__init__(
Expand All @@ -76,13 +79,9 @@ def __init__(
graph_init_params=graph_init_params,
graph_runtime_state=graph_runtime_state,
)
resolved_runtime = runtime
if resolved_runtime is None:
msg = "runtime is required"
raise ValueError(msg)
if form_repository is not None:
with_form_repository = getattr(
resolved_runtime,
runtime,
"with_form_repository",
None,
)
Expand All @@ -91,8 +90,8 @@ def __init__(
if not isinstance(updated_runtime, HumanInputNodeRuntimeProtocol):
msg = "with_form_repository() must return a HumanInput runtime"
raise TypeError(msg)
resolved_runtime = updated_runtime
self._runtime: HumanInputNodeRuntimeProtocol = resolved_runtime
runtime = updated_runtime
self._runtime: HumanInputNodeRuntimeProtocol = runtime

@classmethod
@override
Expand Down
5 changes: 2 additions & 3 deletions src/graphon/nodes/llm/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Literal, override

from graphon.entities.graph_config import NodeConfigDict
from graphon.entities.graph_init_params import GraphInitParams
from graphon.enums import (
BuiltinNodeTypes,
Expand Down Expand Up @@ -132,10 +131,10 @@ class LLMNode(Node[LLMNodeData]):
def __init__(
self,
node_id: str,
config: NodeConfigDict,
config: LLMNodeData,
*,
graph_init_params: GraphInitParams,
graph_runtime_state: GraphRuntimeState,
*,
credentials_provider: object | None = None,
model_factory: object | None = None,
model_instance: PreparedLLMProtocol,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, override

from graphon.entities.graph_config import NodeConfigDict
from graphon.enums import (
BuiltinNodeTypes,
WorkflowNodeExecutionMetadataKey,
Expand Down Expand Up @@ -139,10 +138,10 @@ class ParameterExtractorNode(Node[ParameterExtractorNodeData]):
def __init__(
self,
node_id: str,
config: NodeConfigDict,
config: ParameterExtractorNodeData,
*,
graph_init_params: "GraphInitParams",
graph_runtime_state: "GraphRuntimeState",
*,
credentials_provider: object | None = None,
model_factory: object | None = None,
model_instance: PreparedLLMProtocol,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, override

from graphon.entities.graph_config import NodeConfigDict
from graphon.entities.graph_init_params import GraphInitParams
from graphon.enums import (
BuiltinNodeTypes,
Expand Down Expand Up @@ -87,10 +86,10 @@ class QuestionClassifierNode(Node[QuestionClassifierNodeData]):
def __init__(
self,
node_id: str,
config: NodeConfigDict,
config: QuestionClassifierNodeData,
*,
graph_init_params: "GraphInitParams",
graph_runtime_state: "GraphRuntimeState",
*,
credentials_provider: object | None = None,
model_factory: object | None = None,
model_instance: PreparedLLMProtocol,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any, cast, override

from graphon.entities.graph_config import NodeConfigDict
from graphon.enums import BuiltinNodeTypes, WorkflowNodeExecutionStatus
from graphon.node_events.base import NodeRunResult
from graphon.nodes.base.entities import VariableSelector
Expand All @@ -28,10 +27,10 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
def __init__(
self,
node_id: str,
config: NodeConfigDict,
config: TemplateTransformNodeData,
*,
graph_init_params: "GraphInitParams",
graph_runtime_state: "GraphRuntimeState",
*,
jinja2_template_renderer: Jinja2TemplateRenderer,
max_output_length: int | None = None,
) -> None:
Expand Down
13 changes: 6 additions & 7 deletions src/graphon/nodes/tool/tool_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, override

from graphon.entities.graph_config import NodeConfigDict
from graphon.enums import (
BuiltinNodeTypes,
WorkflowNodeExecutionMetadataKey,
Expand Down Expand Up @@ -61,12 +60,15 @@ class ToolNode(Node[ToolNodeData]):
def __init__(
self,
node_id: str,
config: NodeConfigDict,
config: ToolNodeData,
*,
graph_init_params: "GraphInitParams",
graph_runtime_state: "GraphRuntimeState",
*,
tool_file_manager_factory: ToolFileManagerProtocol,
runtime: ToolNodeRuntimeProtocol | None = None,
# TODO @-LAN: See https://github.com/langgenius/graphon/issues/new/choose. # noqa: FIX002
# Make `runtime` optional once Graphon provides a default tool runtime
# adapter at the workflow boundary.
runtime: ToolNodeRuntimeProtocol,
) -> None:
super().__init__(
node_id=node_id,
Expand All @@ -75,9 +77,6 @@ def __init__(
graph_runtime_state=graph_runtime_state,
)
self._tool_file_manager_factory = tool_file_manager_factory
if runtime is None:
msg = "runtime is required"
raise ValueError(msg)
self._runtime = runtime

def init_tool_runtime(
Expand Down
Loading
Loading