From 7ed7a6d62f40c65f70d2aa667709e8565ca7d54c Mon Sep 17 00:00:00 2001
From: Pavel Tisnovsky
Date: Wed, 1 Oct 2025 14:59:49 +0200
Subject: [PATCH] LCORE-650: update Llama Stack version in documentation and
 examples

---
 README.md                          | 2 +-
 docs/deployment_guide.md           | 2 +-
 docs/getting_started.md            | 4 ++--
 docs/openapi.json                  | 4 ++--
 docs/openapi.md                    | 2 +-
 docs/output.md                     | 2 +-
 examples/pyproject.llamastack.toml | 2 +-
 src/models/responses.py            | 2 +-
 8 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/README.md b/README.md
index 03f629fa5..505190f30 100644
--- a/README.md
+++ b/README.md
@@ -245,7 +245,7 @@ version = "0.1.0"
 description = "Llama Stack runner"
 authors = []
 dependencies = [
-    "llama-stack==0.2.20",
+    "llama-stack==0.2.21",
     "fastapi>=0.115.12",
     "opentelemetry-sdk>=1.34.0",
     "opentelemetry-exporter-otlp>=1.34.0",
diff --git a/docs/deployment_guide.md b/docs/deployment_guide.md
index 82574c5a1..296718d4a 100644
--- a/docs/deployment_guide.md
+++ b/docs/deployment_guide.md
@@ -390,7 +390,7 @@ cp examples/run.yaml /tmp/llama-stack-server
 The output should be in this form:
 ```json
 {
-    "version": "0.2.20"
+    "version": "0.2.21"
 }
 ```
 
diff --git a/docs/getting_started.md b/docs/getting_started.md
index 18d5fd387..5752c7154 100644
--- a/docs/getting_started.md
+++ b/docs/getting_started.md
@@ -24,7 +24,7 @@ It is possible to run Lightspeed Core Stack service with Llama Stack "embedded"
 1. Add and install all required dependencies
 ```bash
 uv add \
-    "llama-stack==0.2.20" \
+    "llama-stack==0.2.21" \
     "fastapi>=0.115.12" \
     "opentelemetry-sdk>=1.34.0" \
     "opentelemetry-exporter-otlp>=1.34.0" \
@@ -383,4 +383,4 @@ curl -X POST "http://localhost:8080/v1/query" \
 ```
 
 #### Step 4: Verify connectivity
-After starting the MCP servers and updating `lightspeed-stack.yaml`, test by sending a prompt to the AI agent. LCS evaluates the prompt against available tools’ metadata, selects the appropriate tool, calls the corresponding MCP server, and uses the result to generate more accurate agent response.
\ No newline at end of file
+After starting the MCP servers and updating `lightspeed-stack.yaml`, test by sending a prompt to the AI agent. LCS evaluates the prompt against available tools’ metadata, selects the appropriate tool, calls the corresponding MCP server, and uses the result to generate more accurate agent response.
diff --git a/docs/openapi.json b/docs/openapi.json
index c9cf4fbc7..4928dd028 100644
--- a/docs/openapi.json
+++ b/docs/openapi.json
@@ -2153,7 +2153,7 @@
                 "llama_stack_version"
             ],
             "title": "InfoResponse",
-            "description": "Model representing a response to an info request.\n\nAttributes:\n    name: Service name.\n    service_version: Service version.\n    llama_stack_version: Llama Stack version.\n\nExample:\n    ```python\n    info_response = InfoResponse(\n        name=\"Lightspeed Stack\",\n        service_version=\"1.0.0\",\n        llama_stack_version=\"0.2.20\",\n    )\n    ```",
+            "description": "Model representing a response to an info request.\n\nAttributes:\n    name: Service name.\n    service_version: Service version.\n    llama_stack_version: Llama Stack version.\n\nExample:\n    ```python\n    info_response = InfoResponse(\n        name=\"Lightspeed Stack\",\n        service_version=\"1.0.0\",\n        llama_stack_version=\"0.2.21\",\n    )\n    ```",
             "examples": [
                 {
                     "llama_stack_version": "1.0.0",
@@ -3159,4 +3159,4 @@
             }
         }
     }
-}
\ No newline at end of file
+}
diff --git a/docs/openapi.md b/docs/openapi.md
index 0f70b78b3..71b20056b 100644
--- a/docs/openapi.md
+++ b/docs/openapi.md
@@ -1011,7 +1011,7 @@ Example:
 info_response = InfoResponse(
     name="Lightspeed Stack",
     service_version="1.0.0",
-    llama_stack_version="0.2.20",
+    llama_stack_version="0.2.21",
 )
 ```
 
diff --git a/docs/output.md b/docs/output.md
index 0f70b78b3..71b20056b 100644
--- a/docs/output.md
+++ b/docs/output.md
@@ -1011,7 +1011,7 @@ Example:
 info_response = InfoResponse(
     name="Lightspeed Stack",
     service_version="1.0.0",
-    llama_stack_version="0.2.20",
+    llama_stack_version="0.2.21",
 )
 ```
 
diff --git a/examples/pyproject.llamastack.toml b/examples/pyproject.llamastack.toml
index 11c3e8731..fd4ae2a4a 100644
--- a/examples/pyproject.llamastack.toml
+++ b/examples/pyproject.llamastack.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
 description = "Default template for PDM package"
 authors = []
 dependencies = [
-    "llama-stack==0.2.20",
+    "llama-stack==0.2.21",
     "fastapi>=0.115.12",
     "opentelemetry-sdk>=1.34.0",
     "opentelemetry-exporter-otlp>=1.34.0",
diff --git a/src/models/responses.py b/src/models/responses.py
index f44b79ea7..7674b8848 100644
--- a/src/models/responses.py
+++ b/src/models/responses.py
@@ -164,7 +164,7 @@ class InfoResponse(BaseModel):
         info_response = InfoResponse(
             name="Lightspeed Stack",
             service_version="1.0.0",
-            llama_stack_version="0.2.20",
+            llama_stack_version="0.2.21",
         )
         ```
     """
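The patch pins `llama-stack==0.2.21` in the dependency lists, the documented version-endpoint output, and the `InfoResponse` examples. A minimal consistency sketch, assuming the package is installed in the current environment and `src/` is on `PYTHONPATH`; the `EXPECTED` constant and `check_llama_stack_version` helper are illustrative and not part of this patch:

```python
# Illustrative check (not part of this patch): confirm the installed
# llama-stack distribution matches the version the docs now pin.
from importlib.metadata import version

from models.responses import InfoResponse  # assumes src/ is on PYTHONPATH

EXPECTED = "0.2.21"  # the version this patch pins everywhere


def check_llama_stack_version() -> None:
    """Fail loudly if the environment drifts from the documented pin."""
    installed = version("llama-stack")
    assert installed == EXPECTED, f"installed {installed}, expected {EXPECTED}"

    # Mirror the documented example so the model and the docs stay in sync.
    info = InfoResponse(
        name="Lightspeed Stack",
        service_version="1.0.0",
        llama_stack_version=installed,
    )
    assert info.llama_stack_version == EXPECTED


if __name__ == "__main__":
    check_llama_stack_version()
    print(f"llama-stack {EXPECTED} is installed and documented consistently")
```

Running such a check in CI would catch the drift this commit fixes: a bumped dependency pin whose documented version strings were left behind.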