Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,17 @@ jobs:
matrix:
build_type: [debug, release]

services:
clickhouse:
image: clickhouse/clickhouse-server
ports:
- 18123:8123
- 19000:9000
env:
CLICKHOUSE_PASSWORD: changeme
options: >-
--ulimit nofile=262144:262144

steps:
- name: Checkout code
uses: actions/checkout@v4
Expand Down
3 changes: 3 additions & 0 deletions .gitmodules
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,6 @@
[submodule "third_party/googletest"]
path = third_party/googletest
url = https://github.com/google/googletest.git
[submodule "third_party/clickhouse-cpp"]
path = third_party/clickhouse-cpp
url = https://github.com/ClickHouse/clickhouse-cpp.git
25 changes: 24 additions & 1 deletion include/ai/openai.h
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,35 @@ namespace openai {

namespace models {
/// Common OpenAI model identifiers

// O-series reasoning models
constexpr const char* kO1 = "o1";
constexpr const char* kO1Mini = "o1-mini";
constexpr const char* kO1Preview = "o1-preview";
constexpr const char* kO3 = "o3";
constexpr const char* kO3Mini = "o3-mini";
constexpr const char* kO4Mini = "o4-mini";

// GPT-4.1 series
constexpr const char* kGpt41 = "gpt-4.1";
constexpr const char* kGpt41Mini = "gpt-4.1-mini";
constexpr const char* kGpt41Nano = "gpt-4.1-nano";

// GPT-4o series
constexpr const char* kGpt4o = "gpt-4o";
constexpr const char* kGpt4oMini = "gpt-4o-mini";
constexpr const char* kGpt4oAudioPreview = "gpt-4o-audio-preview";

// GPT-4 series
constexpr const char* kGpt4 = "gpt-4";
constexpr const char* kGpt4Turbo = "gpt-4-turbo";

// GPT-3.5 series
// NOTE: previously this identifier was also (erroneously) defined under the
// GPT-4 series above, which is a redefinition error; it lives only here now.
constexpr const char* kGpt35Turbo = "gpt-3.5-turbo";

// Special models
constexpr const char* kChatGpt4oLatest = "chatgpt-4o-latest";

/// Default model used when none is specified
constexpr const char* kDefaultModel = kGpt4o;
} // namespace models
Expand Down
3 changes: 2 additions & 1 deletion include/ai/tools.h
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,8 @@ class MultiStepCoordinator {
/// @return Final generation result with all steps
static GenerateResult execute_multi_step(
const GenerateOptions& initial_options,
std::function<GenerateResult(const GenerateOptions&)> generate_func);
const std::function<GenerateResult(const GenerateOptions&)>&
generate_func);

private:
/// Create the next generation options based on previous step
Expand Down
137 changes: 128 additions & 9 deletions include/ai/types/message.h
Original file line number Diff line number Diff line change
Expand Up @@ -3,29 +3,148 @@
#include "enums.h"

#include <algorithm>
#include <string>
#include <variant>
#include <vector>

#include <nlohmann/json.hpp>

namespace ai {

using JsonValue = nlohmann::json;

// Base content part types
/// A plain-text segment of a message's content.
struct TextContentPart {
  std::string text;  // UTF-8 text payload

  explicit TextContentPart(std::string value) : text(std::move(value)) {}
};

/// An assistant-issued tool invocation carried inside a message.
struct ToolCallContentPart {
  std::string id;         // provider-assigned identifier for this call
  std::string tool_name;  // name of the tool being invoked
  JsonValue arguments;    // JSON arguments to pass to the tool

  ToolCallContentPart(std::string call_id, std::string name, JsonValue args)
      : id(std::move(call_id)),
        tool_name(std::move(name)),
        arguments(std::move(args)) {}
};

struct ToolResultContentPart {
std::string tool_call_id;
JsonValue result;
bool is_error = false;

ToolResultContentPart(std::string id, JsonValue r, bool err = false)
: tool_call_id(std::move(id)), result(std::move(r)), is_error(err) {}
};

// Content part variant
using ContentPart =
std::variant<TextContentPart, ToolCallContentPart, ToolResultContentPart>;

// Message content is now a vector of content parts
using MessageContent = std::vector<ContentPart>;

struct Message {
MessageRole role;
std::string content;
MessageContent content;

Message(MessageRole r, std::string c) : role(r), content(std::move(c)) {}
Message(MessageRole r, MessageContent c) : role(r), content(std::move(c)) {}

static Message system(const std::string& content) {
return Message(kMessageRoleSystem, content);
// Factory methods for convenience
static Message system(const std::string& text) {
return Message(kMessageRoleSystem, {TextContentPart{text}});
}

static Message user(const std::string& content) {
return Message(kMessageRoleUser, content);
static Message user(const std::string& text) {
return Message(kMessageRoleUser, {TextContentPart{text}});
}

static Message assistant(const std::string& content) {
return Message(kMessageRoleAssistant, content);
static Message assistant(const std::string& text) {
return Message(kMessageRoleAssistant, {TextContentPart{text}});
}

bool empty() const { return content.empty(); }
static Message assistant_with_tools(
const std::string& text,
const std::vector<ToolCallContentPart>& tools) {
MessageContent content_parts;

// Add text content if not empty
if (!text.empty()) {
content_parts.emplace_back(TextContentPart{text});
}

// Add tool calls
for (const auto& tool : tools) {
content_parts.emplace_back(
ToolCallContentPart{tool.id, tool.tool_name, tool.arguments});
}

return Message(kMessageRoleAssistant, std::move(content_parts));
}

static Message tool_results(
const std::vector<ToolResultContentPart>& results) {
MessageContent content_parts;
for (const auto& result : results) {
content_parts.emplace_back(ToolResultContentPart{
result.tool_call_id, result.result, result.is_error});
}
return Message(kMessageRoleUser, std::move(content_parts));
}

// Helper methods
bool has_text() const {
return std::any_of(content.begin(), content.end(),
[](const ContentPart& part) {
return std::holds_alternative<TextContentPart>(part);
});
}

bool has_tool_calls() const {
return std::any_of(
content.begin(), content.end(), [](const ContentPart& part) {
return std::holds_alternative<ToolCallContentPart>(part);
});
}

bool has_tool_results() const {
return std::any_of(
content.begin(), content.end(), [](const ContentPart& part) {
return std::holds_alternative<ToolResultContentPart>(part);
});
}

std::string get_text() const {
std::string result;
for (const auto& part : content) {
if (const auto* text_part = std::get_if<TextContentPart>(&part)) {
result += text_part->text;
}
}
return result;
}

std::vector<ToolCallContentPart> get_tool_calls() const {
std::vector<ToolCallContentPart> result;
for (const auto& part : content) {
if (const auto* tool_part = std::get_if<ToolCallContentPart>(&part)) {
result.emplace_back(tool_part->id, tool_part->tool_name,
tool_part->arguments);
}
}
return result;
}

std::vector<ToolResultContentPart> get_tool_results() const {
std::vector<ToolResultContentPart> result;
for (const auto& part : content) {
if (const auto* result_part = std::get_if<ToolResultContentPart>(&part)) {
result.emplace_back(result_part->tool_call_id, result_part->result,
result_part->is_error);
}
}
return result;
}

std::string roleToString() const {
switch (role) {
Expand Down
19 changes: 7 additions & 12 deletions include/ai/types/tool.h
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,15 @@

namespace ai {

// Forward declarations
struct ToolCall;
struct ToolExecutionContext;

/// JSON value type for tool parameters and results
using JsonValue = nlohmann::json;

/// Context provided to tool execution functions
struct ToolExecutionContext {
// Identifier of the tool call being executed; tools echo it back so the
// caller can pair results with calls.
std::string tool_call_id;
// Conversation history available to the tool.
// NOTE(review): assumes `Messages` is the message-list alias declared
// elsewhere in this project — confirm.
Messages messages;
// Optional cancellation hook. NOTE(review): whether tools invoke this to
// signal abortion or poll it for cancellation is not visible here — confirm.
std::optional<std::function<void()>> abort_signal;
};

/// Tool execution function signature
/// Parameters: (args, context) -> result
using ToolExecuteFunction =
Expand All @@ -33,13 +35,6 @@ using AsyncToolExecuteFunction =
std::function<std::future<JsonValue>(const JsonValue&,
const ToolExecutionContext&)>;

/// Context provided to tool execution functions
struct ToolExecutionContext {
std::string tool_call_id;
Messages messages;
std::optional<std::function<void()>> abort_signal;
};

struct Tool {
std::string description;
JsonValue parameters_schema;
Expand Down
58 changes: 56 additions & 2 deletions src/providers/anthropic/anthropic_request_builder.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,62 @@ nlohmann::json AnthropicRequestBuilder::build_request_json(
// Use provided messages
for (const auto& msg : options.messages) {
nlohmann::json message;
message["role"] = utils::message_role_to_string(msg.role);
message["content"] = msg.content;

// Handle different content types
if (msg.has_tool_results()) {
// Anthropic expects tool results as content arrays in user messages
message["role"] = "user";
message["content"] = nlohmann::json::array();

for (const auto& result : msg.get_tool_results()) {
nlohmann::json tool_result_content;
tool_result_content["type"] = "tool_result";
tool_result_content["tool_use_id"] = result.tool_call_id;

if (!result.is_error) {
tool_result_content["content"] = result.result.dump();
} else {
tool_result_content["content"] = result.result.dump();
tool_result_content["is_error"] = true;
}

message["content"].push_back(tool_result_content);
}
} else {
// Handle messages with text and/or tool calls
message["role"] = utils::message_role_to_string(msg.role);

// Get text content and tool calls
std::string text_content = msg.get_text();
auto tool_calls = msg.get_tool_calls();

// Anthropic expects content as array for mixed content or tool calls
if (!tool_calls.empty() ||
(msg.role == kMessageRoleAssistant && !text_content.empty())) {
message["content"] = nlohmann::json::array();

// Add text content if present
if (!text_content.empty()) {
message["content"].push_back(
{{"type", "text"}, {"text", text_content}});
}

// Add tool use content
for (const auto& tool_call : tool_calls) {
message["content"].push_back({{"type", "tool_use"},
{"id", tool_call.id},
{"name", tool_call.tool_name},
{"input", tool_call.arguments}});
}
} else if (!text_content.empty()) {
// Simple text message (non-assistant or assistant with text only)
message["content"] = text_content;
} else {
// Empty message, skip
continue;
}
}

request["messages"].push_back(message);
}
} else {
Expand Down
2 changes: 1 addition & 1 deletion src/providers/anthropic/anthropic_response_parser.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ GenerateResult AnthropicResponseParser::parse_success_response(

// Add assistant response to messages
if (!result.text.empty()) {
result.response_messages.push_back({kMessageRoleAssistant, result.text});
result.response_messages.push_back(Message::assistant(result.text));
}
}

Expand Down
Loading
Loading