Reference for the current public module interface.
import mcpplibs.llmapi;
using namespace mcpplibs::llmapi;

Module partitions: mcpplibs.llmapi (primary), mcpplibs.llmapi:types, mcpplibs.llmapi:url, mcpplibs.llmapi:coro, mcpplibs.llmapi:provider, mcpplibs.llmapi:client, mcpplibs.llmapi:openai, mcpplibs.llmapi:anthropic, mcpplibs.llmapi:errors.
Important exported structs and enums:
Role, Message, TextContent, ImageContent, AudioContent, ToolDef, ToolCall, ToolUseContent, ToolResultContent, ChatParams, ChatResponse, EmbeddingResponse, Conversation, Usage, ResponseFormat
template<typename P>
concept Provider = requires(P p, const std::vector<Message>& messages, const ChatParams& params) {
    { p.name() } -> std::convertible_to<std::string_view>;
    { p.chat(messages, params) } -> std::same_as<ChatResponse>;
    { p.chat_async(messages, params) } -> std::same_as<Task<ChatResponse>>;
};

template<typename P>
concept StreamableProvider = Provider<P> && requires(
    P p,
    const std::vector<Message>& messages,
    const ChatParams& params,
    std::function<void(std::string_view)> cb
) {
    { p.chat_stream(messages, params, cb) } -> std::same_as<ChatResponse>;
    { p.chat_stream_async(messages, params, cb) } -> std::same_as<Task<ChatResponse>>;
};

template<typename P>
concept EmbeddableProvider = Provider<P> && requires(
    P p,
    const std::vector<std::string>& inputs,
    std::string_view model
) {
    { p.embed(inputs, model) } -> std::same_as<EmbeddingResponse>;
};

Client is a class template that owns a provider instance and a Conversation.
template<Provider P>
class Client;

auto client = Client(Config{
    .apiKey = std::getenv("OPENAI_API_KEY"),
    .model = "gpt-4o-mini",
});

Config is an exported alias for openai::Config, so the default path uses an OpenAI-style provider without explicitly writing openai::OpenAI.
Client& default_params(ChatParams params)
    Stores default parameters used by chat(), chat_async(), and chat_stream().

Client& system(std::string_view content)
Client& user(std::string_view content)
Client& add_message(Message msg)
Client& clear()

ChatResponse chat(std::string_view userMessage)
ChatResponse chat(std::string_view userMessage, ChatParams params)
    chat() appends the user message, sends the full conversation, stores the assistant text response, and returns the parsed ChatResponse.

Task<ChatResponse> chat_async(std::string_view userMessage)

ChatResponse chat_stream(
    std::string_view userMessage,
    std::function<void(std::string_view)> callback
)
    Available only when P satisfies StreamableProvider.

Task<ChatResponse> chat_stream_async(
    std::string_view userMessage,
    std::function<void(std::string_view)> callback
)

EmbeddingResponse embed(const std::vector<std::string>& inputs, std::string_view model)
    Available only when P satisfies EmbeddableProvider.

const Conversation& conversation() const
Conversation& conversation()
void save_conversation(std::string_view filePath) const
void load_conversation(std::string_view filePath)
const P& provider() const
P& provider()

Client<P> is stateful and not thread-safe:
- use one client per task or thread
- do not share one client across concurrent callers
openai::Config {
    std::string apiKey;
    std::string baseUrl { "https://api.openai.com/v1" };
    std::string model;
    std::string organization;
    std::optional<std::string> proxy;
    std::map<std::string, std::string> customHeaders;
}

anthropic::Config {
    std::string apiKey;
    std::string baseUrl { "https://api.anthropic.com/v1" };
    std::string model;
    std::string version { "2023-06-01" };
    int defaultMaxTokens { 4096 };
    std::optional<std::string> proxy;
    std::map<std::string, std::string> customHeaders;
}

struct ChatParams {
    std::optional<double> temperature;
    std::optional<double> topP;
    std::optional<int> maxTokens;
    std::optional<std::vector<std::string>> stop;
    std::optional<std::vector<ToolDef>> tools;
    std::optional<ToolChoicePolicy> toolChoice;
    std::optional<ResponseFormat> responseFormat;
    std::optional<std::string> extraJson;
};

struct ChatResponse {
    std::string id;
    std::string model;
    std::vector<ContentPart> content;
    StopReason stopReason;
    Usage usage;

    std::string text() const;
    std::vector<ToolCall> tool_calls() const;
};

struct Conversation {
    std::vector<Message> messages;

    void push(Message msg);
    void clear();
    int size() const;
    void save(std::string_view filePath) const;
    static Conversation load(std::string_view filePath);
};

Example (full program):

import mcpplibs.llmapi;
import std;

int main() {
    using namespace mcpplibs::llmapi;

    auto client = Client(Config{
        .apiKey = std::getenv("OPENAI_API_KEY"),
        .model = "gpt-4o-mini",
    });

    client.default_params(ChatParams{
        .temperature = 0.2,
    });

    client.system("Be concise.");

    auto resp1 = client.chat("What is C++?");
    std::cout << resp1.text() << '\n';

    auto resp2 = client.chat_stream("Give me one example of a C++23 feature.", [](std::string_view chunk) {
        std::cout << chunk;
    });

    std::cout << "\nmessages=" << client.conversation().size() << '\n';
    return 0;
}

Error handling:

try {
    auto resp = client.chat("Hello");
} catch (const std::runtime_error& e) {
    std::cerr << "Error: " << e.what() << '\n';
}