6 changes: 3 additions & 3 deletions README.md
@@ -56,15 +56,15 @@ int main() {
 // Ensure ANTHROPIC_API_KEY environment variable is set
 auto client = ai::anthropic::create_client();
 auto result = client.generate_text({
-.model = ai::anthropic::models::kClaude35Sonnet,
+.model = ai::anthropic::models::kClaudeSonnet45,
 .system = "You are a helpful assistant.",
 .prompt = "Explain quantum computing in simple terms."
 });

 if (result) {
 std::cout << result->text << std::endl;
 }

 return 0;
 }
 ```
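For context, the README snippet above compiles into a complete program along these lines. This is a minimal sketch: the `ai/ai.h` include path is an assumption (the PR touches `include/ai/ai.h`), and the `error_message()` accessor is borrowed from this PR's updated tests rather than the README itself.

```cpp
#include <iostream>

#include "ai/ai.h"  // assumed umbrella header (include/ai/ai.h is touched by this PR)

int main() {
  // Ensure ANTHROPIC_API_KEY environment variable is set
  auto client = ai::anthropic::create_client();
  auto result = client.generate_text({
      .model = ai::anthropic::models::kClaudeSonnet45,
      .system = "You are a helpful assistant.",
      .prompt = "Explain quantum computing in simple terms."
  });

  if (result) {
    std::cout << result->text << std::endl;
  } else {
    // error_message() mirrors the accessor used in the updated tool test below
    std::cerr << result.error_message() << std::endl;
  }

  return 0;
}
```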
2 changes: 1 addition & 1 deletion examples/basic_chat.cpp
@@ -98,7 +98,7 @@ int main() {

 auto client4 = ai::anthropic::create_client();
 ai::GenerateOptions options4;
-options4.model = ai::anthropic::models::kClaudeSonnet35;
+options4.model = ai::anthropic::models::kClaudeSonnet45;
 options4.prompt =
 "Write a haiku about programming. Just the haiku, nothing else.";
4 changes: 2 additions & 2 deletions examples/components/all/main.cpp
@@ -52,8 +52,8 @@ int main() {
 auto anthropic_client = ai::anthropic::create_client();
 std::cout << "✓ Anthropic client created successfully\n";
 std::cout << "✓ Available models: "
-<< ai::anthropic::models::kClaudeSonnet35 << ", "
-<< ai::anthropic::models::kClaudeHaiku35 << "\n";
+<< ai::anthropic::models::kClaudeSonnet45 << ", "
+<< ai::anthropic::models::kClaudeHaiku45 << "\n";
 } catch (const std::exception& e) {
 std::cout << "✗ Anthropic client failed: " << e.what() << "\n";
 }
6 changes: 3 additions & 3 deletions examples/components/anthropic/main.cpp
@@ -26,7 +26,7 @@ int main() {
 // Test core functionality
 std::cout << "Testing core functionality...\n";
 ai::GenerateOptions options;
-options.model = "claude-3-5-sonnet-20241022";
+options.model = "claude-sonnet-4-5-20250929";
 options.prompt = "Hello world";
 std::cout << "✓ Core types work fine\n\n";

@@ -37,8 +37,8 @@
 auto client = ai::anthropic::create_client();
 std::cout << "✓ Anthropic client created successfully\n";
 std::cout << "✓ Available models: "
-<< ai::anthropic::models::kClaudeSonnet35 << ", "
-<< ai::anthropic::models::kClaudeHaiku35 << "\n";
+<< ai::anthropic::models::kClaudeSonnet45 << ", "
+<< ai::anthropic::models::kClaudeHaiku45 << "\n";
 } catch (const std::exception& e) {
 std::cout << "✗ Anthropic client failed: " << e.what() << "\n";
 }
8 changes: 4 additions & 4 deletions examples/multi_provider.cpp
@@ -94,9 +94,9 @@ int main() {

 // Test Anthropic models
 results1.push_back(test_provider(
-"Anthropic", ai::anthropic::models::kClaudeSonnet35, simple_question));
+"Anthropic", ai::anthropic::models::kClaudeSonnet45, simple_question));
 results1.push_back(test_provider(
-"Anthropic", ai::anthropic::models::kClaudeHaiku35, simple_question));
+"Anthropic", ai::anthropic::models::kClaudeHaiku45, simple_question));

 for (const auto& result : results1) {
 print_result(result);
@@ -118,7 +118,7 @@ int main() {
 results2.push_back(
 test_provider("OpenAI", ai::openai::models::kGpt4o, creative_prompt));
 results2.push_back(test_provider(
-"Anthropic", ai::anthropic::models::kClaudeSonnet35, creative_prompt));
+"Anthropic", ai::anthropic::models::kClaudeSonnet45, creative_prompt));

 for (const auto& result : results2) {
 print_result(result);
@@ -137,7 +137,7 @@ int main() {
 results3.push_back(
 test_provider("OpenAI", ai::openai::models::kGpt4o, technical_prompt));
 results3.push_back(test_provider(
-"Anthropic", ai::anthropic::models::kClaudeSonnet35, technical_prompt));
+"Anthropic", ai::anthropic::models::kClaudeSonnet45, technical_prompt));

 for (const auto& result : results3) {
 print_result(result);
6 changes: 3 additions & 3 deletions examples/openrouter_example.cpp
@@ -32,11 +32,11 @@ int main() {
 std::cout << "Testing text generation with OpenRouter...\n\n";

 // Using a model that's available on OpenRouter
-// Common models: "openai/gpt-3.5-turbo", "anthropic/claude-3.5-sonnet",
+// Common models: "openai/gpt-4o", "anthropic/claude-sonnet-4-5",
 // "meta-llama/llama-3.1-8b-instruct" See https://openrouter.ai/models for
 // available models
 ai::GenerateOptions options(
-"anthropic/claude-3.5-sonnet", "You are a helpful assistant.",
+"anthropic/claude-sonnet-4-5", "You are a helpful assistant.",
 "What are the benefits of using OpenRouter for AI applications? Give a "
 "brief answer.");

@@ -55,7 +55,7 @@ int main() {
 // Test streaming with OpenRouter
 std::cout << "\n\nTesting streaming with OpenRouter...\n";

-ai::GenerateOptions stream_opts("anthropic/claude-3.5-sonnet",
+ai::GenerateOptions stream_opts("anthropic/claude-sonnet-4-5",
 "You are a creative writer.",
 "Write a haiku about API compatibility.");
 ai::StreamOptions stream_options(stream_opts);
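The example above uses the positional `GenerateOptions(model, system, prompt)` constructor, while other files in this PR assign the fields directly. A short sketch of both forms with the new OpenRouter slug; the field names and constructor come from the diffs in this PR, and only the prompt text is made up for illustration.

```cpp
#include "ai/ai.h"  // assumed umbrella header

// Positional constructor: (model, system, prompt), as in openrouter_example.cpp
ai::GenerateOptions make_options_positional() {
  return ai::GenerateOptions("anthropic/claude-sonnet-4-5",
                             "You are a helpful assistant.",
                             "Briefly explain what OpenRouter provides.");
}

// Field-by-field assignment, as in basic_chat.cpp and test_tool_integration.cpp
ai::GenerateOptions make_options_by_field() {
  ai::GenerateOptions options;
  options.model = "anthropic/claude-sonnet-4-5";
  options.system = "You are a helpful assistant.";
  options.prompt = "Briefly explain what OpenRouter provides.";
  return options;
}
```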
2 changes: 1 addition & 1 deletion examples/test_tool_integration.cpp
@@ -86,7 +86,7 @@ void test_anthropic_tools() {
 {{"input", "string"}}, simple_test_tool)}};

 ai::GenerateOptions options;
-options.model = ai::anthropic::models::kClaudeSonnet35;
+options.model = ai::anthropic::models::kClaudeSonnet45;
 options.prompt = "Please use the test_tool with input 'hello anthropic'";
 options.tools = tools;
 options.tool_choice =
4 changes: 2 additions & 2 deletions examples/tool_calling_async.cpp
@@ -337,12 +337,12 @@ int main() {

 auto anthropic_client = ai::anthropic::create_client();

-std::cout << "Testing async tools with Claude 3.5 Sonnet\n\n";
+std::cout << "Testing async tools with Claude Sonnet 4.5\n\n";

 start_time = std::chrono::high_resolution_clock::now();

 ai::GenerateOptions anthropic_options;
-anthropic_options.model = ai::anthropic::models::kClaudeSonnet35;
+anthropic_options.model = ai::anthropic::models::kClaudeSonnet45;
 anthropic_options.prompt = R"(
 Please help me with these THREE tasks. You MUST use the tools to complete ALL of them:
 1. Use the fetch_news tool to get tech news articles
2 changes: 1 addition & 1 deletion include/ai/ai.h
@@ -74,7 +74,7 @@
 /// ```cpp
 /// auto client = ai::anthropic::create_client();
 /// auto result = client.generate_text({
-/// .model = ai::anthropic::models::kClaude35Sonnet,
+/// .model = ai::anthropic::models::kClaudeSonnet45,
 /// .system = "You are a helpful assistant.",
 /// .prompt = "Explain quantum computing in simple terms."
 /// });
14 changes: 11 additions & 3 deletions include/ai/anthropic.h
@@ -14,20 +14,28 @@ namespace ai {
 namespace anthropic {

 namespace models {
-/// Common Anthropic model identifiers
+/// Common Anthropic model identifiers (Latest models)
+constexpr const char* kClaudeSonnet45 =
+"claude-sonnet-4-5"; // claude-sonnet-4-5-20250929
+constexpr const char* kClaudeHaiku45 =
+"claude-haiku-4-5"; // claude-haiku-4-5-20251001
+constexpr const char* kClaudeOpus41 =
+"claude-opus-4-1"; // claude-opus-4-1-20250805
+
+/// Legacy model identifiers (retained for backward compatibility)
 constexpr const char* kClaudeOpus4 =
 "claude-opus-4-0"; // claude-opus-4-20250514
 constexpr const char* kClaudeSonnet4 =
 "claude-sonnet-4-0"; // claude-sonnet-4-20250514
 constexpr const char* kClaudeSonnet37 =
 "claude-3-7-sonnet-latest"; // claude-3-7-sonnet-20250219
 constexpr const char* kClaudeSonnet35 =
-"claude-3-5-sonnet-latest"; // claude-3-5-sonnet-20241022
+"claude-3-5-sonnet-latest"; // claude-3-5-sonnet-20241022 (DEPRECATED)
 constexpr const char* kClaudeHaiku35 =
 "claude-3-5-haiku-latest"; // claude-3-5-haiku-20241022

 /// Default model used when none is specified
-constexpr const char* kDefaultModel = kClaudeSonnet35;
+constexpr const char* kDefaultModel = kClaudeSonnet45;
 } // namespace models

 /// Create an Anthropic client with default configuration
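Since these constants are plain `constexpr const char*` aliases, switching a call site to Claude 4.5 is just a matter of referencing the new name. A minimal sketch, assuming `ai/ai.h` is the umbrella header that pulls in these declarations:

```cpp
#include <iostream>

#include "ai/ai.h"  // assumed umbrella header; pulls in include/ai/anthropic.h

int main() {
  // kDefaultModel now resolves to the Claude Sonnet 4.5 alias
  std::cout << "Default model: " << ai::anthropic::models::kDefaultModel << "\n";

  ai::GenerateOptions options;
  options.model = ai::anthropic::models::kClaudeHaiku45;  // any of the constants works here
  options.prompt = "Hello world";
  return 0;
}
```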
9 changes: 7 additions & 2 deletions src/providers/anthropic/anthropic_client.cpp
@@ -59,8 +59,13 @@ std::string AnthropicClient::provider_name() const {
 }

 std::vector<std::string> AnthropicClient::supported_models() const {
-return {"claude-3-5-sonnet-20241022", "claude-3-5-haiku-20241022",
-"claude-3-opus-20240229", "claude-3-sonnet-20240229",
+return {"claude-sonnet-4-5-20250929", // Latest Sonnet 4.5
+"claude-haiku-4-5-20251001", // Latest Haiku 4.5
+"claude-opus-4-1-20250805", // Latest Opus 4.1
+"claude-sonnet-4-20250514", // Sonnet 4.0
+"claude-3-5-sonnet-20241022", // Legacy 3.5 Sonnet
+"claude-3-5-haiku-20241022", // Legacy 3.5 Haiku
+"claude-3-opus-20240229", "claude-3-sonnet-20240229",
 "claude-3-haiku-20240307"};
 }
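With the expanded list, callers can guard a request the same way the updated tests do, via `supports_model()` and `supported_models()`. A sketch under the assumption that the client value returned by `create_client()` exposes the same members the tests call through a pointer:

```cpp
#include <iostream>
#include <string>

#include "ai/ai.h"  // assumed umbrella header

int main() {
  auto client = ai::anthropic::create_client();

  const std::string requested = "claude-sonnet-4-5-20250929";
  if (!client.supports_model(requested)) {
    std::cerr << "Model not supported; consider client.default_model() instead\n";
  }

  // Latest 4.x ids are listed first, followed by the legacy 3.x ids
  for (const auto& model : client.supported_models()) {
    std::cout << model << "\n";
  }
  return 0;
}
```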
28 changes: 14 additions & 14 deletions tests/integration/anthropic_integration_test.cpp
@@ -89,7 +89,7 @@ TEST_F(AnthropicIntegrationTest, TextGenerationWithParameters) {
 "Write a very short story about a cat.");
 options.max_tokens = 50;
 options.temperature = 0.7;
-options.top_p = 0.9;
+// Note: Claude 4.5 doesn't allow both temperature and top_p to be set

 auto result = client_->generate_text(options);

@@ -135,7 +135,7 @@ TEST_F(AnthropicIntegrationTest, DifferentModelSupport) {

 std::vector<std::string> models_to_test = {
 ai::anthropic::models::kDefaultModel,
-ai::anthropic::models::kClaudeHaiku35};
+ai::anthropic::models::kClaudeHaiku45};

 for (const auto& model : models_to_test) {
 if (!client_->supports_model(model)) {
@@ -278,7 +278,7 @@ TEST_F(AnthropicIntegrationTest, LargePromptHandling) {
 auto large_prompt =
 TestDataGenerator::createLargePrompt(2000); // ~2KB prompt

-GenerateOptions options(ai::anthropic::models::kClaudeSonnet35, large_prompt);
+GenerateOptions options(ai::anthropic::models::kClaudeSonnet45, large_prompt);
 auto result = client_->generate_text(options);

 TestAssertions::assertSuccess(result);
@@ -299,7 +299,7 @@ TEST_F(AnthropicIntegrationTest, CustomBaseUrl) {
 auto custom_client =
 ai::anthropic::create_client(api_key, "https://api.anthropic.com");

-GenerateOptions options(ai::anthropic::models::kClaudeSonnet35,
+GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
 "Test custom base URL");
 auto result = custom_client.generate_text(options);

@@ -313,7 +313,7 @@ TEST_F(AnthropicIntegrationTest, EmptyPrompt) {
 GTEST_SKIP() << "No ANTHROPIC_API_KEY environment variable set";
 }

-GenerateOptions options(ai::anthropic::models::kClaudeSonnet35, "");
+GenerateOptions options(ai::anthropic::models::kClaudeSonnet45, "");

 // Empty prompt should be caught by validation
 EXPECT_FALSE(options.is_valid());
@@ -328,7 +328,7 @@ TEST_F(AnthropicIntegrationTest, VeryLongResponse) {
 GTEST_SKIP() << "No ANTHROPIC_API_KEY environment variable set";
 }

-GenerateOptions options(ai::anthropic::models::kClaudeSonnet35,
+GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
 "Write a detailed explanation of quantum physics");
 options.max_tokens = 500; // Reasonable limit for testing

@@ -349,7 +349,7 @@ TEST_F(AnthropicIntegrationTest, NetworkTimeout) {

 // Note: This test documents timeout behavior but cannot reliably trigger it
 // with the real API under normal conditions
-GenerateOptions options(ai::anthropic::models::kClaudeSonnet35,
+GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
 "Simple test");
 auto result = client_->generate_text(options);

@@ -375,7 +375,7 @@ TEST_F(AnthropicIntegrationTest, NetworkFailure) {
 auto failing_client = ai::anthropic::create_client(
 api_key, "http://localhost:59999"); // Very unlikely port to be in use

-GenerateOptions options(ai::anthropic::models::kClaudeSonnet35,
+GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
 "Test network failure");
 auto result = failing_client.generate_text(options);

@@ -426,7 +426,7 @@ TEST_F(AnthropicIntegrationTest, MaxTokensRequired) {
 GTEST_SKIP() << "No ANTHROPIC_API_KEY environment variable set";
 }

-GenerateOptions options(ai::anthropic::models::kClaudeSonnet35,
+GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
 "Tell me about artificial intelligence");
 // Anthropic requires max_tokens to be set
 options.max_tokens = 100;
@@ -445,7 +445,7 @@ TEST_F(AnthropicIntegrationTest, SystemMessageHandling) {

 // Anthropic has specific handling for system messages
 GenerateOptions options(
-ai::anthropic::models::kClaudeSonnet35,
+ai::anthropic::models::kClaudeSonnet45,
 "You are Claude, an AI assistant created by Anthropic.",
 "What is your name?");
 options.max_tokens = 50;
@@ -475,11 +475,11 @@ TEST_F(AnthropicIntegrationTest, DefaultModelGeneration) {
 EXPECT_EQ(client_->default_model(), ai::anthropic::models::kDefaultModel);
 if (result.model.has_value()) {
 // Anthropic returns the full model version (e.g.,
-// "claude-3-5-sonnet-20241022") while kDefaultModel is
-// "claude-3-5-sonnet-latest"
-EXPECT_TRUE(result.model.value().find("claude-3-5-sonnet") !=
+// "claude-sonnet-4-5-20250929") while kDefaultModel is
+// "claude-sonnet-4-5"
+EXPECT_TRUE(result.model.value().find("claude-sonnet-4-5") !=
 std::string::npos)
-<< "Expected model to contain 'claude-3-5-sonnet', but got: "
+<< "Expected model to contain 'claude-sonnet-4-5', but got: "
 << result.model.value();
 }
 }
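The first hunk above drops `top_p` because, per the new comment, Claude 4.5 does not accept requests that set both `temperature` and `top_p`. A minimal sketch of the resulting pattern, using only fields that appear in the test code (the include path and helper name are illustrative):

```cpp
#include "ai/ai.h"  // assumed umbrella header

// Pick one sampling control for Claude 4.5: temperature OR top_p, not both.
ai::GenerateOptions make_story_options() {
  ai::GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
                              "Write a very short story about a cat.");
  options.max_tokens = 50;
  options.temperature = 0.7;  // chosen sampling control
  // options.top_p = 0.9;     // leave unset to avoid a rejected request
  return options;
}
```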
2 changes: 1 addition & 1 deletion tests/integration/multi_step_duplicate_execution_test.cpp
@@ -41,7 +41,7 @@ class MultiStepDuplicateExecutionTest
 if (api_key) {
 use_real_api_ = true;
 client_ = ai::anthropic::create_client(api_key);
-model_ = ai::anthropic::models::kClaudeSonnet35;
+model_ = ai::anthropic::models::kClaudeSonnet45;
 } else {
 use_real_api_ = false;
 }
13 changes: 9 additions & 4 deletions tests/integration/tool_calling_integration_test.cpp
@@ -146,7 +146,7 @@ class ToolCallingIntegrationTest
 if (api_key) {
 use_real_api_ = true;
 client_ = ai::anthropic::create_client(api_key);
-model_ = ai::anthropic::models::kClaudeSonnet35;
+model_ = ai::anthropic::models::kClaudeSonnet45;
 } else {
 use_real_api_ = false;
 }
@@ -672,7 +672,7 @@ class AnthropicSpecificToolTest : public ::testing::Test {
 if (api_key) {
 use_real_api_ = true;
 client_ = ai::anthropic::create_client(api_key);
-model_ = ai::anthropic::models::kClaudeSonnet35;
+model_ = ai::anthropic::models::kClaudeSonnet45;
 } else {
 use_real_api_ = false;
 }
@@ -698,8 +698,13 @@ TEST_F(AnthropicSpecificToolTest, MaxTokensRequiredWithTools) {

 auto result = client_->generate_text(options);

-TestAssertions::assertSuccess(result);
-EXPECT_FALSE(result.text.empty());
+// Verify the request succeeded (may have tool calls instead of text)
+EXPECT_TRUE(result.is_success())
+<< "Expected successful result but got error: " << result.error_message();
+EXPECT_FALSE(result.error.has_value());
+
+// Note: When tools are provided, the model may return tool calls instead of
+// text. This test just verifies that requests with tools + max_tokens succeed
 EXPECT_LE(result.usage.completion_tokens, 200);
 }
14 changes: 7 additions & 7 deletions tests/unit/anthropic_client_test.cpp
@@ -81,17 +81,17 @@ TEST_F(AnthropicClientTest, ConstructorWithHttpUrl) {
 TEST_F(AnthropicClientTest, SupportedModelsContainsExpectedModels) {
 auto models = client_->supported_models();

-EXPECT_THAT(models, testing::Contains("claude-3-5-sonnet-20241022"));
-EXPECT_THAT(models, testing::Contains("claude-3-haiku-20240307"));
-EXPECT_THAT(models, testing::Contains("claude-3-opus-20240229"));
-EXPECT_THAT(models, testing::Contains("claude-3-sonnet-20240229"));
+EXPECT_THAT(models, testing::Contains("claude-sonnet-4-5-20250929"));
+EXPECT_THAT(models, testing::Contains("claude-haiku-4-5-20251001"));
+EXPECT_THAT(models, testing::Contains("claude-opus-4-1-20250805"));
+EXPECT_THAT(models, testing::Contains("claude-sonnet-4-20250514"));
 EXPECT_FALSE(models.empty());
 }

 TEST_F(AnthropicClientTest, SupportsValidModel) {
-EXPECT_TRUE(client_->supports_model("claude-3-5-sonnet-20241022"));
-EXPECT_TRUE(client_->supports_model("claude-3-haiku-20240307"));
-EXPECT_TRUE(client_->supports_model("claude-3-opus-20240229"));
+EXPECT_TRUE(client_->supports_model("claude-sonnet-4-5-20250929"));
+EXPECT_TRUE(client_->supports_model("claude-haiku-4-5-20251001"));
+EXPECT_TRUE(client_->supports_model("claude-opus-4-1-20250805"));
 }

 TEST_F(AnthropicClientTest, DoesNotSupportInvalidModel) {