Skip to content

Commit 3d46e06

Browse files
Merge pull request #17 from XyLearningProgramming/bugfix/empty-assis-content
🐛 Force assistant message content to be non-empty for llama.cpp
2 parents cba9f62 + d889ffa commit 3d46e06

1 file changed

Lines changed: 29 additions & 1 deletion

File tree

slm_server/model.py

Lines changed: 29 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,14 @@
1313
ChatCompletionTool,
1414
ChatCompletionToolChoiceOption,
1515
)
16-
from pydantic import BaseModel, ConfigDict, Field, conlist, model_validator
16+
from pydantic import (
17+
BaseModel,
18+
ConfigDict,
19+
Field,
20+
conlist,
21+
field_validator,
22+
model_validator,
23+
)
1724

1825

1926
# ---------------------------------------------------------------------------
@@ -93,6 +100,27 @@ class ChatCompletionRequest(BaseModel):
93100
default=None, description="Number of top log probabilities to return"
94101
)
95102

103+
@field_validator("messages", mode="before")
@classmethod
def _normalize_assistant_content(cls, v: Any) -> Any:
    """Coerce ``content: null`` on assistant messages to ``""`` (OpenAI compat).

    llama-cpp's TypedDict declares ``content: NotRequired[str]``, which
    rejects an explicit ``None`` when the key is present. The OpenAI spec
    allows ``null`` content on assistant messages that carry
    ``tool_calls``, and langchain-openai sends it that way, so we
    normalise to an empty string here.

    Args:
        v: Raw ``messages`` payload; expected to be a list of message
            dicts, but any other shape is passed through untouched for
            pydantic to reject downstream.

    Returns:
        The messages with assistant ``content: None`` replaced by ``""``.
    """
    if not isinstance(v, list):
        return v
    # Build a fresh list with copied dicts instead of mutating in place:
    # a mode="before" validator receives the caller's raw input object,
    # which the caller may still hold a reference to and reuse.
    normalized = []
    for msg in v:
        if (
            isinstance(msg, dict)
            and msg.get("role") == "assistant"
            and "content" in msg
            and msg["content"] is None
        ):
            msg = {**msg, "content": ""}
        normalized.append(msg)
    return normalized
123+
96124
@model_validator(mode="after")
97125
def _default_tool_choice_auto(self) -> Self:
98126
"""Match OpenAI: default to "auto" when tools are present."""

0 commit comments

Comments
 (0)