Skip to content

Commit b3fcdee

Browse files
committed
fix(oci): tighten mixed v1/v2 stream coverage
1 parent fe51ec3 commit b3fcdee

File tree

2 files changed

+22
-9
lines changed

2 files changed

+22
-9
lines changed

src/cohere/oci_client.py

Lines changed: 17 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -566,7 +566,12 @@ def get_oci_url(
566566
"rerank": "rerankText", # OCI uses rerankText, not rerank
567567
}
568568

569-
action = action_map.get(endpoint, endpoint)
569+
action = action_map.get(endpoint)
570+
if action is None:
571+
raise ValueError(
572+
f"Endpoint '{endpoint}' is not supported by OCI Generative AI. "
573+
f"Supported endpoints: {list(action_map.keys())}"
574+
)
570575
return f"{base}/{api_version}/actions/{action}"
571576

572577

@@ -1089,11 +1094,14 @@ def _process_line(line: str) -> typing.Iterator[bytes]:
10891094
except json.JSONDecodeError:
10901095
return
10911096

1092-
if is_v2:
1093-
for event_bytes in _transform_v2_event(oci_event):
1094-
yield event_bytes
1095-
else:
1096-
yield _transform_v1_event(oci_event)
1097+
try:
1098+
if is_v2:
1099+
for event_bytes in _transform_v2_event(oci_event):
1100+
yield event_bytes
1101+
else:
1102+
yield _transform_v1_event(oci_event)
1103+
except Exception as exc:
1104+
raise RuntimeError(f"OCI stream event transformation failed for endpoint '{endpoint}': {exc}") from exc
10971105

10981106
for chunk in stream:
10991107
buffer += chunk
@@ -1129,6 +1137,9 @@ def transform_stream_event(
11291137
content_value = ""
11301138
message = oci_event.get("message")
11311139

1140+
if "message" in oci_event and not isinstance(message, dict):
1141+
raise TypeError("OCI V2 stream event message must be an object")
1142+
11321143
if isinstance(message, dict) and "content" in message:
11331144
content_list = message["content"]
11341145
if content_list and isinstance(content_list, list) and len(content_list) > 0:

tests/test_oci_client.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -956,6 +956,7 @@ def test_embed_response_lowercases_embedding_keys(self):
956956
result = transform_oci_response_to_cohere(
957957
"embed",
958958
{"id": "embed-id", "embeddings": {"FLOAT": [[0.1, 0.2]], "INT8": [[1, 2]]}},
959+
is_v2=True,
959960
)
960961

961962
self.assertIn("float", result["embeddings"])
@@ -1003,7 +1004,7 @@ def test_stream_wrapper_emits_full_event_lifecycle(self):
10031004
]
10041005

10051006
events = []
1006-
for raw in transform_oci_stream_wrapper(iter(chunks), "chat"):
1007+
for raw in transform_oci_stream_wrapper(iter(chunks), "chat", is_v2=True):
10071008
line = raw.decode("utf-8").strip()
10081009
if line.startswith("data: "):
10091010
events.append(json.loads(line[6:]))
@@ -1034,7 +1035,7 @@ def test_stream_wrapper_skips_malformed_json_with_warning(self):
10341035
b'data: {"message": {"content": [{"type": "TEXT", "text": "hello"}]}}\n',
10351036
b'data: [DONE]\n',
10361037
]
1037-
events = list(transform_oci_stream_wrapper(iter(chunks), "chat"))
1038+
events = list(transform_oci_stream_wrapper(iter(chunks), "chat", is_v2=True))
10381039
# Should get message-start + content-start + content-delta + content-end + message-end.
10391040
self.assertEqual(len(events), 5)
10401041

@@ -1048,7 +1049,7 @@ def test_stream_wrapper_raises_on_transform_error(self):
10481049
b'data: {"message": null}\n',
10491050
]
10501051
with self.assertRaises(RuntimeError) as ctx:
1051-
list(transform_oci_stream_wrapper(iter(chunks), "chat"))
1052+
list(transform_oci_stream_wrapper(iter(chunks), "chat", is_v2=True))
10521053
self.assertIn("OCI stream event transformation failed", str(ctx.exception))
10531054

10541055
def test_stream_event_finish_reason_keeps_final_text(self):
@@ -1061,6 +1062,7 @@ def test_stream_event_finish_reason_keeps_final_text(self):
10611062
"message": {"content": [{"type": "TEXT", "text": " world"}]},
10621063
"finishReason": "COMPLETE",
10631064
},
1065+
is_v2=True,
10641066
)
10651067

10661068
self.assertEqual(events[0]["type"], "content-delta")

0 commit comments

Comments (0)