diff --git a/README.md b/README.md
index 47b4db7..8b4bf0a 100644
--- a/README.md
+++ b/README.md
@@ -56,15 +56,15 @@ int main() {
   // Ensure ANTHROPIC_API_KEY environment variable is set
   auto client = ai::anthropic::create_client();
   auto result = client.generate_text({
-      .model = ai::anthropic::models::kClaude35Sonnet,
+      .model = ai::anthropic::models::kClaudeSonnet45,
       .system = "You are a helpful assistant.",
       .prompt = "Explain quantum computing in simple terms."
   });
-
+
   if (result) {
     std::cout << result->text << std::endl;
   }
-
+
   return 0;
 }
 ```
diff --git a/examples/basic_chat.cpp b/examples/basic_chat.cpp
index e74e82e..76ecebe 100644
--- a/examples/basic_chat.cpp
+++ b/examples/basic_chat.cpp
@@ -98,7 +98,7 @@ int main() {
   auto client4 = ai::anthropic::create_client();
 
   ai::GenerateOptions options4;
-  options4.model = ai::anthropic::models::kClaudeSonnet35;
+  options4.model = ai::anthropic::models::kClaudeSonnet45;
   options4.prompt =
       "Write a haiku about programming. Just the haiku, nothing else.";
 
diff --git a/examples/components/all/main.cpp b/examples/components/all/main.cpp
index f0b2bc2..3139094 100644
--- a/examples/components/all/main.cpp
+++ b/examples/components/all/main.cpp
@@ -52,8 +52,8 @@ int main() {
     auto anthropic_client = ai::anthropic::create_client();
     std::cout << "✓ Anthropic client created successfully\n";
     std::cout << "✓ Available models: "
-              << ai::anthropic::models::kClaudeSonnet35 << ", "
-              << ai::anthropic::models::kClaudeHaiku35 << "\n";
+              << ai::anthropic::models::kClaudeSonnet45 << ", "
+              << ai::anthropic::models::kClaudeHaiku45 << "\n";
   } catch (const std::exception& e) {
     std::cout << "✗ Anthropic client failed: " << e.what() << "\n";
   }
diff --git a/examples/components/anthropic/main.cpp b/examples/components/anthropic/main.cpp
index e51f51c..d5a6340 100644
--- a/examples/components/anthropic/main.cpp
+++ b/examples/components/anthropic/main.cpp
@@ -26,7 +26,7 @@ int main() {
   // Test core functionality
   std::cout << "Testing core functionality...\n";
   ai::GenerateOptions options;
-  options.model = "claude-3-5-sonnet-20241022";
+  options.model = "claude-sonnet-4-5-20250929";
   options.prompt = "Hello world";
   std::cout << "✓ Core types work fine\n\n";
 
@@ -37,8 +37,8 @@ int main() {
     auto client = ai::anthropic::create_client();
     std::cout << "✓ Anthropic client created successfully\n";
     std::cout << "✓ Available models: "
-              << ai::anthropic::models::kClaudeSonnet35 << ", "
-              << ai::anthropic::models::kClaudeHaiku35 << "\n";
+              << ai::anthropic::models::kClaudeSonnet45 << ", "
+              << ai::anthropic::models::kClaudeHaiku45 << "\n";
   } catch (const std::exception& e) {
     std::cout << "✗ Anthropic client failed: " << e.what() << "\n";
   }
diff --git a/examples/multi_provider.cpp b/examples/multi_provider.cpp
index 433d39e..76b65db 100644
--- a/examples/multi_provider.cpp
+++ b/examples/multi_provider.cpp
@@ -94,9 +94,9 @@ int main() {
 
   // Test Anthropic models
   results1.push_back(test_provider(
-      "Anthropic", ai::anthropic::models::kClaudeSonnet35, simple_question));
+      "Anthropic", ai::anthropic::models::kClaudeSonnet45, simple_question));
   results1.push_back(test_provider(
-      "Anthropic", ai::anthropic::models::kClaudeHaiku35, simple_question));
+      "Anthropic", ai::anthropic::models::kClaudeHaiku45, simple_question));
 
   for (const auto& result : results1) {
     print_result(result);
@@ -118,7 +118,7 @@ int main() {
   results2.push_back(
       test_provider("OpenAI", ai::openai::models::kGpt4o, creative_prompt));
   results2.push_back(test_provider(
-      "Anthropic", ai::anthropic::models::kClaudeSonnet35, creative_prompt));
+      "Anthropic", ai::anthropic::models::kClaudeSonnet45, creative_prompt));
 
   for (const auto& result : results2) {
     print_result(result);
@@ -137,7 +137,7 @@ int main() {
   results3.push_back(
       test_provider("OpenAI", ai::openai::models::kGpt4o, technical_prompt));
   results3.push_back(test_provider(
-      "Anthropic", ai::anthropic::models::kClaudeSonnet35, technical_prompt));
+      "Anthropic", ai::anthropic::models::kClaudeSonnet45, technical_prompt));
 
   for (const auto& result : results3) {
     print_result(result);
diff --git a/examples/openrouter_example.cpp b/examples/openrouter_example.cpp
index c4b1fe9..0175787 100644
--- a/examples/openrouter_example.cpp
+++ b/examples/openrouter_example.cpp
@@ -32,11 +32,11 @@ int main() {
   std::cout << "Testing text generation with OpenRouter...\n\n";
 
   // Using a model that's available on OpenRouter
-  // Common models: "openai/gpt-3.5-turbo", "anthropic/claude-3.5-sonnet",
+  // Common models: "openai/gpt-4o", "anthropic/claude-sonnet-4-5",
   // "meta-llama/llama-3.1-8b-instruct" See https://openrouter.ai/models for
   // available models
   ai::GenerateOptions options(
-      "anthropic/claude-3.5-sonnet", "You are a helpful assistant.",
+      "anthropic/claude-sonnet-4-5", "You are a helpful assistant.",
       "What are the benefits of using OpenRouter for AI applications? Give a "
       "brief answer.");
 
@@ -55,7 +55,7 @@ int main() {
   // Test streaming with OpenRouter
   std::cout << "\n\nTesting streaming with OpenRouter...\n";
 
-  ai::GenerateOptions stream_opts("anthropic/claude-3.5-sonnet",
+  ai::GenerateOptions stream_opts("anthropic/claude-sonnet-4-5",
                                   "You are a creative writer.",
                                   "Write a haiku about API compatibility.");
   ai::StreamOptions stream_options(stream_opts);
diff --git a/examples/test_tool_integration.cpp b/examples/test_tool_integration.cpp
index d725279..ea7dacc 100644
--- a/examples/test_tool_integration.cpp
+++ b/examples/test_tool_integration.cpp
@@ -86,7 +86,7 @@ void test_anthropic_tools() {
                               {{"input", "string"}}, simple_test_tool)}};
 
   ai::GenerateOptions options;
-  options.model = ai::anthropic::models::kClaudeSonnet35;
+  options.model = ai::anthropic::models::kClaudeSonnet45;
   options.prompt = "Please use the test_tool with input 'hello anthropic'";
   options.tools = tools;
   options.tool_choice =
diff --git a/examples/tool_calling_async.cpp b/examples/tool_calling_async.cpp
index c536447..073383c 100644
--- a/examples/tool_calling_async.cpp
+++ b/examples/tool_calling_async.cpp
@@ -337,12 +337,12 @@ int main() {
 
   auto anthropic_client = ai::anthropic::create_client();
 
-  std::cout << "Testing async tools with Claude 3.5 Sonnet\n\n";
+  std::cout << "Testing async tools with Claude Sonnet 4.5\n\n";
 
   start_time = std::chrono::high_resolution_clock::now();
 
   ai::GenerateOptions anthropic_options;
-  anthropic_options.model = ai::anthropic::models::kClaudeSonnet35;
+  anthropic_options.model = ai::anthropic::models::kClaudeSonnet45;
   anthropic_options.prompt = R"(
 Please help me with these THREE tasks. You MUST use the tools to complete ALL of them:
 1. Use the fetch_news tool to get tech news articles
diff --git a/include/ai/ai.h b/include/ai/ai.h
index 022712b..9a815d9 100644
--- a/include/ai/ai.h
+++ b/include/ai/ai.h
@@ -74,7 +74,7 @@
 /// ```cpp
 /// auto client = ai::anthropic::create_client();
 /// auto result = client.generate_text({
-///     .model = ai::anthropic::models::kClaude35Sonnet,
+///     .model = ai::anthropic::models::kClaudeSonnet45,
 ///     .system = "You are a helpful assistant.",
 ///     .prompt = "Explain quantum computing in simple terms."
 /// });
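Reviewer note: the `include/ai/ai.h` hunk above documents the designated-initializer call style, and the `include/ai/anthropic.h` hunk below repoints `kDefaultModel` at `kClaudeSonnet45`. A minimal sketch of what the default change means for callers, assuming the client falls back to `kDefaultModel` when `.model` is left unset (as the header comment states) and reusing the result interface from the README example:

```cpp
#include <iostream>

#include "ai/ai.h"

int main() {
  auto client = ai::anthropic::create_client();

  // No .model set: per the header comment, the client is expected to fall
  // back to ai::anthropic::models::kDefaultModel, which this PR changes
  // from "claude-3-5-sonnet-latest" to "claude-sonnet-4-5".
  auto result = client.generate_text({
      .system = "You are a helpful assistant.",
      .prompt = "Say hello."
  });

  if (result) {
    std::cout << result->text << std::endl;  // now served by Claude Sonnet 4.5
  }
  return 0;
}
```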
diff --git a/include/ai/anthropic.h b/include/ai/anthropic.h
index c750bc1..01ab168 100644
--- a/include/ai/anthropic.h
+++ b/include/ai/anthropic.h
@@ -14,7 +14,15 @@ namespace ai {
 namespace anthropic {
 namespace models {
 
-/// Common Anthropic model identifiers
+/// Common Anthropic model identifiers (latest models)
+constexpr const char* kClaudeSonnet45 =
+    "claude-sonnet-4-5";  // claude-sonnet-4-5-20250929
+constexpr const char* kClaudeHaiku45 =
+    "claude-haiku-4-5";  // claude-haiku-4-5-20251001
+constexpr const char* kClaudeOpus41 =
+    "claude-opus-4-1";  // claude-opus-4-1-20250805
+
+/// Legacy model identifiers (retained for backward compatibility)
 constexpr const char* kClaudeOpus4 =
     "claude-opus-4-0";  // claude-opus-4-20250514
 constexpr const char* kClaudeSonnet4 =
@@ -22,12 +30,12 @@ constexpr const char* kClaudeSonnet4 =
 constexpr const char* kClaudeSonnet37 =
     "claude-3-7-sonnet-latest";  // claude-3-7-sonnet-20250219
 constexpr const char* kClaudeSonnet35 =
-    "claude-3-5-sonnet-latest";  // claude-3-5-sonnet-20241022
+    "claude-3-5-sonnet-latest";  // claude-3-5-sonnet-20241022 (DEPRECATED)
 constexpr const char* kClaudeHaiku35 =
     "claude-3-5-haiku-latest";  // claude-3-5-haiku-20241022
 
 /// Default model used when none is specified
-constexpr const char* kDefaultModel = kClaudeSonnet35;
+constexpr const char* kDefaultModel = kClaudeSonnet45;
 }  // namespace models
 
 /// Create an Anthropic client with default configuration
diff --git a/src/providers/anthropic/anthropic_client.cpp b/src/providers/anthropic/anthropic_client.cpp
index bedc098..337de98 100644
--- a/src/providers/anthropic/anthropic_client.cpp
+++ b/src/providers/anthropic/anthropic_client.cpp
@@ -59,8 +59,13 @@ std::string AnthropicClient::provider_name() const {
 }
 
 std::vector<std::string> AnthropicClient::supported_models() const {
-  return {"claude-3-5-sonnet-20241022", "claude-3-5-haiku-20241022",
-          "claude-3-opus-20240229", "claude-3-sonnet-20240229",
+  return {"claude-sonnet-4-5-20250929",  // Latest Sonnet 4.5
+          "claude-haiku-4-5-20251001",   // Latest Haiku 4.5
+          "claude-opus-4-1-20250805",    // Latest Opus 4.1
+          "claude-sonnet-4-20250514",    // Sonnet 4.0
+          "claude-3-5-sonnet-20241022",  // Legacy 3.5 Sonnet
+          "claude-3-5-haiku-20241022",   // Legacy 3.5 Haiku
+          "claude-3-opus-20240229", "claude-3-sonnet-20240229",
           "claude-3-haiku-20240307"};
 }
 
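The `supported_models()` list above now leads with the 4.x IDs while keeping the 3.x ones. A short sketch of guarding a request on that list, assuming the handle returned by `create_client` exposes the same `supported_models()`/`supports_model()` interface the tests call through `client_->`:

```cpp
#include <iostream>

#include "ai/ai.h"

int main() {
  auto client = ai::anthropic::create_client();

  // Enumerate the IDs that supported_models() now returns, 4.x first.
  for (const auto& model : client.supported_models()) {
    std::cout << model << "\n";
  }

  // Guard a request the way the DifferentModelSupport integration test does.
  const std::string candidate = "claude-haiku-4-5-20251001";
  if (client.supports_model(candidate)) {
    ai::GenerateOptions options;
    options.model = candidate;
    options.prompt = "Hello";
    auto result = client.generate_text(options);
  }
  return 0;
}
```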
diff --git a/tests/integration/anthropic_integration_test.cpp b/tests/integration/anthropic_integration_test.cpp
index d3a12fc..4ec6e5c 100644
--- a/tests/integration/anthropic_integration_test.cpp
+++ b/tests/integration/anthropic_integration_test.cpp
@@ -89,7 +89,7 @@ TEST_F(AnthropicIntegrationTest, TextGenerationWithParameters) {
                           "Write a very short story about a cat.");
   options.max_tokens = 50;
   options.temperature = 0.7;
-  options.top_p = 0.9;
+  // Note: Claude 4.5 doesn't allow both temperature and top_p to be set
 
   auto result = client_->generate_text(options);
 
@@ -135,7 +135,7 @@ TEST_F(AnthropicIntegrationTest, DifferentModelSupport) {
   std::vector<std::string> models_to_test = {
       ai::anthropic::models::kDefaultModel,
-      ai::anthropic::models::kClaudeHaiku35};
+      ai::anthropic::models::kClaudeHaiku45};
 
   for (const auto& model : models_to_test) {
     if (!client_->supports_model(model)) {
@@ -278,7 +278,7 @@ TEST_F(AnthropicIntegrationTest, LargePromptHandling) {
   auto large_prompt = TestDataGenerator::createLargePrompt(2000);  // ~2KB prompt
 
-  GenerateOptions options(ai::anthropic::models::kClaudeSonnet35, large_prompt);
+  GenerateOptions options(ai::anthropic::models::kClaudeSonnet45, large_prompt);
 
   auto result = client_->generate_text(options);
 
   TestAssertions::assertSuccess(result);
@@ -299,7 +299,7 @@ TEST_F(AnthropicIntegrationTest, CustomBaseUrl) {
   auto custom_client =
       ai::anthropic::create_client(api_key, "https://api.anthropic.com");
 
-  GenerateOptions options(ai::anthropic::models::kClaudeSonnet35,
+  GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
                           "Test custom base URL");
 
   auto result = custom_client.generate_text(options);
@@ -313,7 +313,7 @@ TEST_F(AnthropicIntegrationTest, EmptyPrompt) {
     GTEST_SKIP() << "No ANTHROPIC_API_KEY environment variable set";
   }
 
-  GenerateOptions options(ai::anthropic::models::kClaudeSonnet35, "");
+  GenerateOptions options(ai::anthropic::models::kClaudeSonnet45, "");
 
   // Empty prompt should be caught by validation
   EXPECT_FALSE(options.is_valid());
@@ -328,7 +328,7 @@ TEST_F(AnthropicIntegrationTest, VeryLongResponse) {
     GTEST_SKIP() << "No ANTHROPIC_API_KEY environment variable set";
   }
 
-  GenerateOptions options(ai::anthropic::models::kClaudeSonnet35,
+  GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
                           "Write a detailed explanation of quantum physics");
   options.max_tokens = 500;  // Reasonable limit for testing
 
@@ -349,7 +349,7 @@ TEST_F(AnthropicIntegrationTest, NetworkTimeout) {
   // Note: This test documents timeout behavior but cannot reliably trigger it
   // with the real API under normal conditions
-  GenerateOptions options(ai::anthropic::models::kClaudeSonnet35,
+  GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
                           "Simple test");
 
   auto result = client_->generate_text(options);
@@ -375,7 +375,7 @@ TEST_F(AnthropicIntegrationTest, NetworkFailure) {
   auto failing_client = ai::anthropic::create_client(
       api_key, "http://localhost:59999");  // Very unlikely port to be in use
 
-  GenerateOptions options(ai::anthropic::models::kClaudeSonnet35,
+  GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
                           "Test network failure");
 
   auto result = failing_client.generate_text(options);
@@ -426,7 +426,7 @@ TEST_F(AnthropicIntegrationTest, MaxTokensRequired) {
     GTEST_SKIP() << "No ANTHROPIC_API_KEY environment variable set";
   }
 
-  GenerateOptions options(ai::anthropic::models::kClaudeSonnet35,
+  GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
                           "Tell me about artificial intelligence");
   // Anthropic requires max_tokens to be set
   options.max_tokens = 100;
@@ -445,7 +445,7 @@ TEST_F(AnthropicIntegrationTest, SystemMessageHandling) {
   // Anthropic has specific handling for system messages
   GenerateOptions options(
-      ai::anthropic::models::kClaudeSonnet35,
+      ai::anthropic::models::kClaudeSonnet45,
       "You are Claude, an AI assistant created by Anthropic.",
       "What is your name?");
   options.max_tokens = 50;
@@ -475,11 +475,11 @@ TEST_F(AnthropicIntegrationTest, DefaultModelGeneration) {
   EXPECT_EQ(client_->default_model(), ai::anthropic::models::kDefaultModel);
   if (result.model.has_value()) {
     // Anthropic returns the full model version (e.g.,
-    // "claude-3-5-sonnet-20241022") while kDefaultModel is
-    // "claude-3-5-sonnet-latest"
-    EXPECT_TRUE(result.model.value().find("claude-3-5-sonnet") !=
+    // "claude-sonnet-4-5-20250929") while kDefaultModel is
+    // "claude-sonnet-4-5"
+    EXPECT_TRUE(result.model.value().find("claude-sonnet-4-5") !=
                 std::string::npos)
-        << "Expected model to contain 'claude-3-5-sonnet', but got: "
+        << "Expected model to contain 'claude-sonnet-4-5', but got: "
         << result.model.value();
   }
 }
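The `TextGenerationWithParameters` hunk above drops `top_p` because, per the new comment, Claude 4.5 rejects requests that set both `temperature` and `top_p`. A sketch of the resulting caller-side pattern, picking a single sampling control (prompt and values mirror the test):

```cpp
#include "ai/ai.h"

int main() {
  auto client = ai::anthropic::create_client();

  ai::GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
                              "Write a very short story about a cat.");
  options.max_tokens = 50;    // Anthropic requires max_tokens to be set
  options.temperature = 0.7;  // pick temperature OR top_p, not both
  // options.top_p = 0.9;     // would be rejected alongside temperature on 4.5

  auto result = client.generate_text(options);
  return 0;
}
```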
diff --git a/tests/integration/multi_step_duplicate_execution_test.cpp b/tests/integration/multi_step_duplicate_execution_test.cpp
index f4b5088..7df0029 100644
--- a/tests/integration/multi_step_duplicate_execution_test.cpp
+++ b/tests/integration/multi_step_duplicate_execution_test.cpp
@@ -41,7 +41,7 @@ class MultiStepDuplicateExecutionTest
     if (api_key) {
       use_real_api_ = true;
       client_ = ai::anthropic::create_client(api_key);
-      model_ = ai::anthropic::models::kClaudeSonnet35;
+      model_ = ai::anthropic::models::kClaudeSonnet45;
     } else {
       use_real_api_ = false;
     }
diff --git a/tests/integration/tool_calling_integration_test.cpp b/tests/integration/tool_calling_integration_test.cpp
index 96e28d9..415cc98 100644
--- a/tests/integration/tool_calling_integration_test.cpp
+++ b/tests/integration/tool_calling_integration_test.cpp
@@ -146,7 +146,7 @@ class ToolCallingIntegrationTest
     if (api_key) {
       use_real_api_ = true;
       client_ = ai::anthropic::create_client(api_key);
-      model_ = ai::anthropic::models::kClaudeSonnet35;
+      model_ = ai::anthropic::models::kClaudeSonnet45;
     } else {
       use_real_api_ = false;
     }
@@ -672,7 +672,7 @@ class AnthropicSpecificToolTest : public ::testing::Test {
     if (api_key) {
       use_real_api_ = true;
       client_ = ai::anthropic::create_client(api_key);
-      model_ = ai::anthropic::models::kClaudeSonnet35;
+      model_ = ai::anthropic::models::kClaudeSonnet45;
     } else {
       use_real_api_ = false;
     }
@@ -698,8 +698,13 @@ TEST_F(AnthropicSpecificToolTest, MaxTokensRequiredWithTools) {
 
   auto result = client_->generate_text(options);
 
-  TestAssertions::assertSuccess(result);
-  EXPECT_FALSE(result.text.empty());
+  // Verify the request succeeded (may have tool calls instead of text)
+  EXPECT_TRUE(result.is_success())
+      << "Expected successful result but got error: " << result.error_message();
+  EXPECT_FALSE(result.error.has_value());
+
+  // Note: When tools are provided, the model may return tool calls instead of
+  // text. This test just verifies that requests with tools + max_tokens succeed.
 
   EXPECT_LE(result.usage.completion_tokens, 200);
 }
diff --git a/tests/unit/anthropic_client_test.cpp b/tests/unit/anthropic_client_test.cpp
index 521eb2b..07240a3 100644
--- a/tests/unit/anthropic_client_test.cpp
+++ b/tests/unit/anthropic_client_test.cpp
@@ -81,17 +81,17 @@ TEST_F(AnthropicClientTest, ConstructorWithHttpUrl) {
 TEST_F(AnthropicClientTest, SupportedModelsContainsExpectedModels) {
   auto models = client_->supported_models();
 
-  EXPECT_THAT(models, testing::Contains("claude-3-5-sonnet-20241022"));
-  EXPECT_THAT(models, testing::Contains("claude-3-haiku-20240307"));
-  EXPECT_THAT(models, testing::Contains("claude-3-opus-20240229"));
-  EXPECT_THAT(models, testing::Contains("claude-3-sonnet-20240229"));
+  EXPECT_THAT(models, testing::Contains("claude-sonnet-4-5-20250929"));
+  EXPECT_THAT(models, testing::Contains("claude-haiku-4-5-20251001"));
+  EXPECT_THAT(models, testing::Contains("claude-opus-4-1-20250805"));
+  EXPECT_THAT(models, testing::Contains("claude-sonnet-4-20250514"));
 
   EXPECT_FALSE(models.empty());
 }
 
 TEST_F(AnthropicClientTest, SupportsValidModel) {
-  EXPECT_TRUE(client_->supports_model("claude-3-5-sonnet-20241022"));
-  EXPECT_TRUE(client_->supports_model("claude-3-haiku-20240307"));
-  EXPECT_TRUE(client_->supports_model("claude-3-opus-20240229"));
+  EXPECT_TRUE(client_->supports_model("claude-sonnet-4-5-20250929"));
+  EXPECT_TRUE(client_->supports_model("claude-haiku-4-5-20251001"));
+  EXPECT_TRUE(client_->supports_model("claude-opus-4-1-20250805"));
 }
 
 TEST_F(AnthropicClientTest, DoesNotSupportInvalidModel) {
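The `MaxTokensRequiredWithTools` rewrite above stops asserting non-empty text, since a tool-enabled request may legitimately come back with tool calls and no text. A sketch of the corresponding caller-side handling, using only the result members the tests already exercise (`is_success()`, `error_message()`, `text`); the prompt is illustrative and tool definitions are omitted:

```cpp
#include <iostream>

#include "ai/ai.h"

int main() {
  auto client = ai::anthropic::create_client();

  ai::GenerateOptions options(ai::anthropic::models::kClaudeSonnet45,
                              "What's the weather in Paris?");
  options.max_tokens = 200;  // required by Anthropic
  // options.tools = ...;    // tool definitions elided in this sketch

  auto result = client.generate_text(options);
  if (!result.is_success()) {
    std::cerr << result.error_message() << "\n";
    return 1;
  }
  // With tools attached, result.text may be empty even on success; the
  // model may have answered with tool calls instead of prose.
  if (!result.text.empty()) {
    std::cout << result.text << "\n";
  }
  return 0;
}
```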
diff --git a/tests/unit/anthropic_stream_test.cpp b/tests/unit/anthropic_stream_test.cpp
index 350ab56..518eeef 100644
--- a/tests/unit/anthropic_stream_test.cpp
+++ b/tests/unit/anthropic_stream_test.cpp
@@ -15,19 +15,19 @@ class AnthropicStreamTest : public AnthropicTestFixture {};
 // StreamOptions Tests
 TEST_F(AnthropicStreamTest, StreamOptionsBasicConstructor) {
   StreamOptions options(
-      GenerateOptions("claude-3-5-sonnet-20241022", "Hello, world!"));
+      GenerateOptions("claude-sonnet-4-5-20250929", "Hello, world!"));
 
-  EXPECT_EQ(options.model, "claude-3-5-sonnet-20241022");
+  EXPECT_EQ(options.model, "claude-sonnet-4-5-20250929");
   EXPECT_EQ(options.prompt, "Hello, world!");
   EXPECT_TRUE(options.system.empty());
   EXPECT_TRUE(options.messages.empty());
 }
 
 TEST_F(AnthropicStreamTest, StreamOptionsWithSystemPrompt) {
-  StreamOptions options(GenerateOptions("claude-3-5-sonnet-20241022",
+  StreamOptions options(GenerateOptions("claude-sonnet-4-5-20250929",
                                         "System prompt", "User prompt"));
 
-  EXPECT_EQ(options.model, "claude-3-5-sonnet-20241022");
+  EXPECT_EQ(options.model, "claude-sonnet-4-5-20250929");
   EXPECT_EQ(options.system, "System prompt");
   EXPECT_EQ(options.prompt, "User prompt");
 }
@@ -35,23 +35,23 @@ TEST_F(AnthropicStreamTest, StreamOptionsWithSystemPrompt) {
 TEST_F(AnthropicStreamTest, StreamOptionsWithMessages) {
   Messages messages = createSampleAnthropicConversation();
   StreamOptions options(
-      GenerateOptions("claude-3-5-sonnet-20241022", std::move(messages)));
+      GenerateOptions("claude-sonnet-4-5-20250929", std::move(messages)));
 
-  EXPECT_EQ(options.model, "claude-3-5-sonnet-20241022");
+  EXPECT_EQ(options.model, "claude-sonnet-4-5-20250929");
   EXPECT_FALSE(options.messages.empty());
   EXPECT_TRUE(options.has_messages());
 }
 
 TEST_F(AnthropicStreamTest, StreamOptionsValidation) {
   StreamOptions valid_options(
-      GenerateOptions("claude-3-5-sonnet-20241022", "Valid prompt"));
+      GenerateOptions("claude-sonnet-4-5-20250929", "Valid prompt"));
   EXPECT_TRUE(valid_options.is_valid());
 
   StreamOptions invalid_model(GenerateOptions("", "Valid prompt"));
   EXPECT_FALSE(invalid_model.is_valid());
 
   StreamOptions invalid_prompt(
-      GenerateOptions("claude-3-5-sonnet-20241022", ""));
+      GenerateOptions("claude-sonnet-4-5-20250929", ""));
   EXPECT_FALSE(invalid_prompt.is_valid());
 }
 
@@ -190,7 +190,7 @@ class AnthropicStreamErrorTest : public AnthropicTestFixture {};
 TEST_F(AnthropicStreamErrorTest, HandleStreamConnectionError) {
   ControllableAnthropicClient client(kTestAnthropicApiKey);
   StreamOptions options(
-      GenerateOptions("claude-3-5-sonnet-20241022", "Test prompt"));
+      GenerateOptions("claude-sonnet-4-5-20250929", "Test prompt"));
 
   client.setShouldFail(true);
 
@@ -204,7 +204,7 @@ TEST_F(AnthropicStreamErrorTest, HandleStreamConnectionError) {
 TEST_F(AnthropicStreamErrorTest, HandleStreamTimeout) {
   ControllableAnthropicClient client(kTestAnthropicApiKey);
   StreamOptions options(
-      GenerateOptions("claude-3-5-sonnet-20241022", "Test prompt"));
+      GenerateOptions("claude-sonnet-4-5-20250929", "Test prompt"));
 
   client.setShouldTimeout(true);
 
@@ -285,13 +285,13 @@ TEST_F(AnthropicStreamIntegrationTest, StreamWithClientConfiguration) {
   EXPECT_TRUE(client.is_valid());
 
   StreamOptions options(
-      GenerateOptions("claude-3-5-sonnet-20241022", "Stream test"));
+      GenerateOptions("claude-sonnet-4-5-20250929", "Stream test"));
 
   auto result = client.stream_text(options);
 
   EXPECT_EQ(client.getCallCount(), 1);
   auto last_options = client.getLastStreamOptions();
-  EXPECT_EQ(last_options.model, "claude-3-5-sonnet-20241022");
+  EXPECT_EQ(last_options.model, "claude-sonnet-4-5-20250929");
"claude-sonnet-4-5-20250929"); EXPECT_EQ(last_options.prompt, "Stream test"); } @@ -347,12 +347,12 @@ TEST_F(AnthropicStreamEventTypesTest, MessageStartEvent) { std::string event_data = "data: " "{\"type\":\"message_start\",\"message\":{\"id\":\"msg_123\",\"type\":" - "\"message\",\"role\":\"assistant\",\"content\":[],\"model\":\"claude-3-" - "5-sonnet-20241022\",\"stop_reason\":null,\"stop_sequence\":null," + "\"message\",\"role\":\"assistant\",\"content\":[],\"model\":\"claude-" + "sonnet-4-5-20250929\",\"stop_reason\":null,\"stop_sequence\":null," "\"usage\":{\"input_tokens\":25,\"output_tokens\":1}}}\n\n"; EXPECT_THAT(event_data, testing::HasSubstr("message_start")); - EXPECT_THAT(event_data, testing::HasSubstr("claude-3-5-sonnet-20241022")); + EXPECT_THAT(event_data, testing::HasSubstr("claude-sonnet-4-5-20250929")); EXPECT_THAT(event_data, testing::HasSubstr("input_tokens")); } diff --git a/tests/utils/mock_anthropic_client.cpp b/tests/utils/mock_anthropic_client.cpp index 170ced2..fd53888 100644 --- a/tests/utils/mock_anthropic_client.cpp +++ b/tests/utils/mock_anthropic_client.cpp @@ -132,8 +132,8 @@ std::string ControllableAnthropicClient::provider_name() const { } std::vector ControllableAnthropicClient::supported_models() const { - return {"claude-3-5-sonnet-20241022", "claude-3-haiku-20240307", - "claude-3-opus-20240229", "claude-3-sonnet-20240229"}; + return {"claude-sonnet-4-5-20250929", "claude-haiku-4-5-20251001", + "claude-opus-4-1-20250805", "claude-sonnet-4-20250514"}; } bool ControllableAnthropicClient::supports_model( diff --git a/tests/utils/mock_anthropic_client.h b/tests/utils/mock_anthropic_client.h index 6ae79c8..7f05cfe 100644 --- a/tests/utils/mock_anthropic_client.h +++ b/tests/utils/mock_anthropic_client.h @@ -94,7 +94,7 @@ class AnthropicResponseBuilder { public: static std::string buildSuccessResponse( const std::string& content = "Test response", - const std::string& model = "claude-3-5-sonnet-20241022", + const std::string& model = "claude-sonnet-4-5-20250929", int input_tokens = 10, int output_tokens = 20); diff --git a/tests/utils/test_fixtures.cpp b/tests/utils/test_fixtures.cpp index d45814e..9a861f7 100644 --- a/tests/utils/test_fixtures.cpp +++ b/tests/utils/test_fixtures.cpp @@ -271,7 +271,7 @@ std::vector TestDataGenerator::createAnthropicStreamingEvents() { "data: " "{\"type\":\"message_start\",\"message\":{\"id\":\"msg_stream1\"," "\"type\":\"message\",\"role\":\"assistant\",\"content\":[],\"model\":" - "\"claude-3-5-sonnet-20241022\",\"stop_reason\":null,\"stop_sequence\":" + "\"claude-sonnet-4-5-20250929\",\"stop_reason\":null,\"stop_sequence\":" "null,\"usage\":{\"input_tokens\":25,\"output_tokens\":1}}}\n\n", "data: " "{\"type\":\"content_block_start\",\"index\":0,\"content_block\":{" diff --git a/tests/utils/test_fixtures.h b/tests/utils/test_fixtures.h index 4c3ae7f..76fe4e9 100644 --- a/tests/utils/test_fixtures.h +++ b/tests/utils/test_fixtures.h @@ -26,7 +26,7 @@ class AITestFixture : public ::testing::Test { // Anthropic test data static constexpr const char* kTestAnthropicApiKey = "sk-ant-test123456789"; static constexpr const char* kTestAnthropicModel = - "claude-3-5-sonnet-20241022"; + "claude-sonnet-4-5-20250929"; static constexpr const char* kTestAnthropicBaseUrl = "https://api.anthropic.com"; };