Skip to content

Commit 093a8db

Browse files
feat(providers): implement Anthropic, OpenRouter, Ollama, and Compatible providers
- Task 2.3: Anthropic Provider - Implement Anthropic Messages API with system prompt as top-level field - Handle tool_use and tool_result content blocks - Add streaming support via SSE - Task 2.4: OpenRouter, Ollama, and Compatible Providers - Add CompatibleProvider base class for OpenAI-compatible endpoints - Add OpenRouterProvider with required HTTP-Referer and X-Title headers - Add OllamaProvider for local models at localhost:11434 - All providers support tool calling, streaming, and model listing
1 parent 52548bc commit 093a8db

6 files changed

Lines changed: 806 additions & 0 deletions

File tree

Lines changed: 275 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,275 @@
1+
using System.Net.Http.Json;
2+
using System.Runtime.CompilerServices;
3+
using System.Text.Json;
4+
using ClawSharp.Core.Providers;
5+
using Microsoft.Extensions.Logging;
6+
7+
namespace ClawSharp.Providers;
8+
9+
/// <summary>
/// Anthropic Messages API provider implementation.
/// Maps the provider-neutral <c>LlmRequest</c>/<c>LlmResponse</c> model onto the
/// Anthropic wire format: system prompts become the top-level <c>system</c> field,
/// assistant tool calls become <c>tool_use</c> content blocks, and tool results
/// become <c>tool_result</c> content blocks in a user-role message.
/// </summary>
public class AnthropicProvider : ILlmProvider
{
    private readonly HttpClient _http;
    private readonly ILogger<AnthropicProvider> _logger;

    /// <summary>Provider identifier used for registry lookup.</summary>
    public string Name => "anthropic";

    /// <summary>
    /// Creates the provider. <paramref name="httpClient"/> is expected to be
    /// pre-configured with the Anthropic base address and auth/version headers.
    /// </summary>
    /// <exception cref="ArgumentNullException">Either argument is null.</exception>
    public AnthropicProvider(HttpClient httpClient, ILogger<AnthropicProvider> logger)
    {
        _http = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Best-effort reachability probe. Anthropic has no dedicated health endpoint,
    /// so any HTTP response — including 401 Unauthorized — proves the host is up.
    /// </summary>
    public async Task<bool> IsAvailableAsync(CancellationToken ct = default)
    {
        try
        {
            var response = await _http.GetAsync("", ct);
            // 401 means "reachable but needs/failed auth", which still counts as available.
            return response.IsSuccessStatusCode || response.StatusCode == System.Net.HttpStatusCode.Unauthorized;
        }
        catch (Exception ex)
        {
            _logger.LogDebug(ex, "Anthropic API not available");
            return false;
        }
    }

    /// <summary>
    /// Returns an empty list: Anthropic does not expose a public models-list endpoint.
    /// </summary>
    public Task<IReadOnlyList<string>> ListModelsAsync(CancellationToken ct = default)
    {
        return Task.FromResult<IReadOnlyList<string>>([]);
    }

    /// <summary>
    /// Sends a non-streaming completion request to <c>v1/messages</c>.
    /// </summary>
    /// <exception cref="HttpRequestException">The API returned a non-success status.</exception>
    public async Task<LlmResponse> CompleteAsync(LlmRequest request, CancellationToken ct = default)
    {
        var body = BuildRequestBody(request);
        var response = await _http.PostAsJsonAsync("v1/messages", body, ct);
        response.EnsureSuccessStatusCode();

        var json = await response.Content.ReadFromJsonAsync<JsonElement>(ct);
        return ParseResponse(json);
    }

    /// <summary>
    /// Streams a completion via server-sent events. Yields one chunk per
    /// <c>content_block_delta</c> (text) or <c>message_delta</c> (stop reason) event.
    /// </summary>
    public async IAsyncEnumerable<LlmStreamChunk> StreamAsync(
        LlmRequest request,
        [EnumeratorCancellation] CancellationToken ct = default)
    {
        var body = BuildRequestBody(request, stream: true);
        using var httpRequest = new HttpRequestMessage(HttpMethod.Post, "v1/messages")
        {
            Content = JsonContent.Create(body)
        };

        using var response = await _http.SendAsync(httpRequest, HttpCompletionOption.ResponseHeadersRead, ct);
        response.EnsureSuccessStatusCode();

        await using var stream = await response.Content.ReadAsStreamAsync(ct);
        using var reader = new StreamReader(stream);

        string? line;
        while ((line = await reader.ReadLineAsync(ct)) != null)
        {
            // SSE framing: "event: <name>" lines carry no payload; only "data: <json>" does.
            if (string.IsNullOrWhiteSpace(line) || !line.StartsWith("data: ")) continue;

            var data = line.Substring(6).Trim();
            // Anthropic does not send "[DONE]" (that is OpenAI framing), but tolerate
            // it in case a compatibility proxy injects it.
            if (data == "[DONE]") break;

            var json = JsonSerializer.Deserialize<JsonElement>(data);
            var chunk = ParseStreamChunk(json);
            if (chunk != null) yield return chunk;

            // "message_stop" is the terminal event for an Anthropic stream.
            if (json.TryGetProperty("type", out var eventType) && eventType.GetString() == "message_stop")
                break;
        }
    }

    /// <summary>
    /// Translates an <c>LlmRequest</c> into the Anthropic Messages API body.
    /// Fixes relative to a naive OpenAI-style mapping:
    /// - system prompts are a top-level "system" string, never a message;
    /// - assistant tool calls are "tool_use" blocks INSIDE the message's content
    ///   array (a top-level "tool_use" field is not part of the API);
    /// - tool results are "tool_result" blocks in a user-role message placed
    ///   immediately after the assistant turn that issued the calls.
    /// </summary>
    private object BuildRequestBody(LlmRequest request, bool stream = false)
    {
        var messages = new List<object>();
        string? system = null;
        var pendingToolResults = new List<object>();

        // Emits accumulated tool results as a single user-role message. The API
        // requires tool_result blocks to directly follow the tool_use turn.
        void FlushToolResults()
        {
            if (pendingToolResults.Count == 0) return;
            messages.Add(new { role = "user", content = pendingToolResults.ToArray() });
            pendingToolResults.Clear();
        }

        foreach (var msg in request.Messages)
        {
            if (msg.Role == "system")
            {
                // Concatenate multiple system messages instead of silently keeping
                // only the last one.
                system = system == null ? msg.Content : system + "\n" + msg.Content;
            }
            else if (msg.Role == "tool")
            {
                pendingToolResults.Add(new
                {
                    type = "tool_result",
                    tool_use_id = msg.ToolCallId,
                    content = msg.Content
                });
            }
            else if (msg.ToolCalls != null && msg.ToolCalls.Count > 0)
            {
                FlushToolResults();

                // Assistant message with tool calls: optional leading text block,
                // then one tool_use block per call, all inside the content array.
                var blocks = new List<object>();
                if (!string.IsNullOrEmpty(msg.Content))
                {
                    blocks.Add(new { type = "text", text = msg.Content });
                }
                foreach (var tc in msg.ToolCalls)
                {
                    // ArgumentsJson is a raw JSON string; the API wants it as an object.
                    var input = JsonSerializer.Deserialize<JsonElement>(tc.ArgumentsJson);
                    blocks.Add(new { type = "tool_use", id = tc.Id, name = tc.Name, input });
                }
                messages.Add(new { role = "assistant", content = blocks.ToArray() });
            }
            else
            {
                FlushToolResults();
                messages.Add(new { role = msg.Role, content = msg.Content });
            }
        }

        // Trailing tool results (the common case: last turn was tool execution).
        FlushToolResults();

        var body = new Dictionary<string, object>
        {
            ["model"] = request.Model,
            ["messages"] = messages,
            ["stream"] = stream,
            // max_tokens is REQUIRED by the Messages API; 4096 is a safe default.
            ["max_tokens"] = request.MaxTokens ?? 4096
        };

        if (!string.IsNullOrEmpty(system))
        {
            body["system"] = system;
        }

        if (request.Tools != null && request.Tools.Count > 0)
        {
            body["tools"] = request.Tools.Select(t => new
            {
                name = t.Name,
                description = t.Description,
                input_schema = t.ParametersSchema
            }).ToArray();
        }

        return body;
    }

    /// <summary>
    /// Parses a non-streaming Messages API response: concatenates "text" content
    /// blocks, collects "tool_use" blocks as tool-call requests, and extracts
    /// stop_reason and token usage when present.
    /// </summary>
    private LlmResponse ParseResponse(JsonElement json)
    {
        var content = "";
        var toolCalls = new List<ToolCallRequest>();

        if (json.TryGetProperty("content", out var contentArray))
        {
            foreach (var block in contentArray.EnumerateArray())
            {
                var type = block.GetProperty("type").GetString();
                if (type == "text")
                {
                    content += block.GetProperty("text").GetString();
                }
                else if (type == "tool_use")
                {
                    var id = block.GetProperty("id").GetString()!;
                    var name = block.GetProperty("name").GetString()!;
                    // Re-serialize the structured "input" object back to a JSON string
                    // for the provider-neutral ToolCallRequest contract.
                    var arguments = JsonSerializer.Serialize(block.GetProperty("input"));

                    toolCalls.Add(new ToolCallRequest(id, name, arguments));
                }
            }
        }

        var stopReason = json.TryGetProperty("stop_reason", out var reason)
            ? reason.GetString() ?? "stop"
            : "stop";

        UsageInfo? usage = null;
        if (json.TryGetProperty("usage", out var usageProp))
        {
            var inputTokens = usageProp.GetProperty("input_tokens").GetInt32();
            var outputTokens = usageProp.GetProperty("output_tokens").GetInt32();
            usage = new UsageInfo(inputTokens, outputTokens, inputTokens + outputTokens);
        }

        return new LlmResponse(content, toolCalls, stopReason, usage);
    }

    /// <summary>
    /// Parses one Anthropic SSE event into a stream chunk. Text arrives as
    /// "content_block_delta" events with delta.type == "text_delta"; the stop
    /// reason arrives on the "message_delta" event. All other event types
    /// (message_start, content_block_start/stop, ping, message_stop) yield null.
    /// </summary>
    private LlmStreamChunk? ParseStreamChunk(JsonElement json)
    {
        if (!json.TryGetProperty("type", out var typeProp))
            return null;

        switch (typeProp.GetString())
        {
            case "content_block_delta":
                if (json.TryGetProperty("delta", out var delta) &&
                    delta.TryGetProperty("type", out var deltaType) &&
                    deltaType.GetString() == "text_delta" &&
                    delta.TryGetProperty("text", out var text))
                {
                    return new LlmStreamChunk(text.GetString(), null, null, null);
                }
                return null;

            case "message_delta":
                if (json.TryGetProperty("delta", out var messageDelta) &&
                    messageDelta.TryGetProperty("stop_reason", out var stopReason) &&
                    stopReason.ValueKind != JsonValueKind.Null)
                {
                    return new LlmStreamChunk(null, null, stopReason.GetString(), null);
                }
                return null;

            default:
                return null;
        }
    }
}

0 commit comments

Comments
 (0)