From 244b5375f2a01e75101501b0c7dc424a2f3d9003 Mon Sep 17 00:00:00 2001
From: Gorka Eguileor
Date: Tue, 24 Jun 2025 20:05:30 +0200
Subject: [PATCH] Fix llama_stack_client log levels

Log levels on the llama_stack_client are currently broken: setting
`LLAMA_STACK_LOG=debug` doesn't work as it should, and log levels are
automatically changed to WARNING.

This is caused by us blindly loading the `llama_stack` library, which
initializes logging to WARNING except for a subset of modules that can
be adjusted using the `LLAMA_STACK_LOGGING` env var.

With this patch we only import `llama_stack` when we are actually going
to use it, so that when we use the normal llama-stack client we can set
its logging level.
---
 src/client.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/client.py b/src/client.py
index d01b0411e..acc166599 100644
--- a/src/client.py
+++ b/src/client.py
@@ -4,10 +4,6 @@
 
 from typing import Optional
 
-from llama_stack.distribution.library_client import (
-    AsyncLlamaStackAsLibraryClient,  # type: ignore
-    LlamaStackAsLibraryClient,  # type: ignore
-)
 from llama_stack_client import AsyncLlamaStackClient, LlamaStackClient  # type: ignore
 from models.config import LLamaStackConfiguration
 from utils.types import Singleton
@@ -26,6 +22,7 @@ def load(self, llama_stack_config: LLamaStackConfiguration) -> None:
         if llama_stack_config.use_as_library_client is True:
             if llama_stack_config.library_client_config_path is not None:
                 logger.info("Using Llama stack as library client")
+                from llama_stack.distribution.library_client import LlamaStackAsLibraryClient  # type: ignore
                 client = LlamaStackAsLibraryClient(
                     llama_stack_config.library_client_config_path
                 )
@@ -62,6 +59,7 @@ async def load(self, llama_stack_config: LLamaStackConfiguration) -> None:
         if llama_stack_config.use_as_library_client is True:
             if llama_stack_config.library_client_config_path is not None:
                 logger.info("Using Llama stack as library client")
+                from llama_stack.distribution.library_client import AsyncLlamaStackAsLibraryClient  # type: ignore
                 client = AsyncLlamaStackAsLibraryClient(
                     llama_stack_config.library_client_config_path
                 )
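
For context, a minimal sketch of the deferred-import pattern the patch applies, assuming (as the commit message describes) that importing `llama_stack` reconfigures logging at import time; the `make_library_client` helper and the logger name used here are illustrative only, not part of the patch.

```python
import logging

# The application sets its own log level first, e.g. driven by
# LLAMA_STACK_LOG=debug.
logging.getLogger("llama_stack_client").setLevel(logging.DEBUG)

# Before the patch the library-client import sat at module level, so merely
# importing client.py pulled in llama_stack and its logging setup:
#
#     from llama_stack.distribution.library_client import LlamaStackAsLibraryClient
#
# After the patch the import is deferred into the branch that needs it, so
# plain llama_stack_client users never trigger that side effect.
def make_library_client(config_path: str):
    # Hypothetical helper mirroring the patched code path; the import only
    # runs when the library client is actually requested.
    from llama_stack.distribution.library_client import (  # type: ignore
        LlamaStackAsLibraryClient,
    )
    return LlamaStackAsLibraryClient(config_path)
```

The trade-off of a deferred import is that its cost and any import errors surface at first use instead of at startup, which is acceptable here since the library client is only one of the two supported configurations.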