Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
127 changes: 73 additions & 54 deletions .github/workflows/e2e_tests.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# .github/workflows/e2e_test.yml
# .github/workflows/e2e_tests.yml
name: E2E Tests

on: [push, pull_request_target]
Expand All @@ -9,13 +9,18 @@ jobs:
strategy:
fail-fast: false
matrix:
environment: [ "ci", "azure"]
mode: ["server", "library"]
environment: ["ci", "azure"]

name: "E2E: ${{ matrix.mode }} mode / ${{ matrix.environment }}"

env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
E2E_OPENAI_MODEL: ${{ vars.E2E_OPENAI_MODEL }}
CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
CLIENT_ID: ${{ secrets.CLIENT_ID }}
TENANT_ID: ${{ secrets.TENANT_ID }}
E2E_DEPLOYMENT_MODE: ${{ matrix.mode }}

steps:
- uses: actions/checkout@v4
Expand All @@ -41,42 +46,24 @@ jobs:
echo "Current commit: $(git rev-parse HEAD)"
echo "Current commit message: $(git log -1 --oneline)"
echo ""
echo "=== Recent commits (should show setup-metrics commits) ==="
echo "=== Recent commits ==="
git log --oneline -5

- uses: 1arp/create-a-file-action@0.4.5
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
with:
path: '.'
isAbsolutePath: false
file: 'lightspeed-stack.yaml'
content: |
name: Lightspeed Core Service (LCS)
service:
host: 0.0.0.0
port: 8080
auth_enabled: false
workers: 1
color_log: true
access_log: true
llama_stack:
# Uses a remote llama-stack service
# The instance would have already been started with a llama-stack-run.yaml file
use_as_library_client: false
# Alternative for "as library use"
# use_as_library_client: true
# library_client_config_path: <path-to-llama-stack-run.yaml-file>
url: http://llama-stack:8321
api_key: xyzzy
user_data_collection:
feedback_enabled: true
feedback_storage: "/tmp/data/feedback"
transcripts_enabled: true
transcripts_storage: "/tmp/data/transcripts"

authentication:
module: "noop"
- name: Load lightspeed-stack.yaml configuration
run: |
MODE="${{ matrix.mode }}"
CONFIG_FILE="tests/e2e/configuration/lightspeed-stack-${MODE}-mode.yaml"

echo "Loading configuration for ${MODE} mode"
echo "Source: ${CONFIG_FILE}"

if [ ! -f "${CONFIG_FILE}" ]; then
echo "❌ Configuration file not found: ${CONFIG_FILE}"
exit 1
fi

cp "${CONFIG_FILE}" lightspeed-stack.yaml
echo "✅ Configuration loaded successfully"

- name: Get Azure API key (access token)
if: matrix.environment == 'azure'
Expand Down Expand Up @@ -106,7 +93,7 @@ jobs:

- name: Select and configure run.yaml
env:
CONFIG_ENVIRONMENT: ${{ matrix.environment || 'ci' }}
CONFIG_ENVIRONMENT: ${{ matrix.environment }}
run: |
CONFIGS_DIR="tests/e2e/configs"
ENVIRONMENT="$CONFIG_ENVIRONMENT"
Expand All @@ -128,37 +115,42 @@ jobs:
echo "Looking for: $CONFIG_FILE"

if [ -f "$CONFIG_FILE" ]; then
echo "Found config for environment: $ENVIRONMENT"
echo "Found config for $ENVIRONMENT environment"
cp "$CONFIG_FILE" run.yaml
else
echo "Configuration file not found: $CONFIG_FILE"
echo "Available files in $CONFIGS_DIR:"
ls -la "$CONFIGS_DIR/"
echo "Available files:"
find "$CONFIGS_DIR" -name "*.yaml"
exit 1
fi

# Update paths for container environment (relative -> absolute)
sed -i 's|db_path: \.llama/distributions|db_path: /app-root/.llama/distributions|g' run.yaml
sed -i 's|db_path: tmp/|db_path: /app-root/.llama/distributions/|g' run.yaml

echo "Successfully configured for environment: $ENVIRONMENT"
echo "Successfully configured for $ENVIRONMENT environment"
echo "Using configuration: $(basename "$CONFIG_FILE")"

- name: Show final configuration
run: |
echo "=== Configuration Summary ==="
echo "Source config: tests/e2e/configs/run-ci.yaml"
echo "Deployment mode: ${{ matrix.mode }}"
echo "Environment: ${{ matrix.environment }}"
echo "Source config: tests/e2e/configs/run-${{ matrix.environment }}.yaml"
echo "Final file: run.yaml"
echo "Container mount: /app-root/run.yaml"
echo ""
echo "=== Final Configuration Preview ==="
echo "=== Configuration Preview ==="
echo "Providers: $(grep -c "provider_id:" run.yaml)"
echo "Models: $(grep -c "model_id:" run.yaml)"
echo ""
echo "=== lightspeed-stack.yaml ==="
grep -A 3 "llama_stack:" lightspeed-stack.yaml

- name: Run service manually
- name: Run services (Server Mode)
if: matrix.mode == 'server'
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
AZURE_API_KEY: ${{ env.AZURE_API_KEY }}
run: |
# Debug: Check if environment variable is available for docker-compose
echo "OPENAI_API_KEY is set: $([ -n "$OPENAI_API_KEY" ] && echo 'YES' || echo 'NO')"
Expand All @@ -176,24 +168,46 @@ jobs:
echo "All services started successfully"
fi

- name: Run services (Library Mode)
if: matrix.mode == 'library'
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
AZURE_API_KEY: ${{ env.AZURE_API_KEY }}
run: |
echo "Starting service in library mode (1 container)"
docker compose -f docker-compose-library.yaml up -d

if docker compose -f docker-compose-library.yaml ps | grep -E 'Exit|exited|stopped'; then
echo "Service failed to start - showing logs:"
docker compose -f docker-compose-library.yaml logs
exit 1
else
echo "Service started successfully"
fi

- name: Wait for services
run: |
echo "Waiting for services to be healthy..."
sleep 20 # adjust depending on boot time
sleep 20

- name: Quick connectivity test
run: |
echo "Testing basic connectivity before full test suite..."
curl -f http://localhost:8080/v1/models || {
echo "❌ Basic connectivity failed - showing logs before running full tests"
docker compose logs --tail=30
echo "❌ Basic connectivity failed - showing logs"
if [ "${{ matrix.mode }}" == "server" ]; then
docker compose logs --tail=30
else
docker compose -f docker-compose-library.yaml logs --tail=30
fi
exit 1
}

- name: Run e2e tests
env:
TERM: xterm-256color
FORCE_COLOR: 1
E2E_DEPLOYMENT_MODE: ${{ matrix.mode }}
run: |
echo "Installing test dependencies..."
pip install uv
Expand All @@ -206,9 +220,14 @@ jobs:
if: failure()
run: |
echo "=== Test failure logs ==="
echo "=== llama-stack logs ==="
docker compose logs llama-stack

echo ""
echo "=== lightspeed-stack logs ==="
docker compose logs lightspeed-stack
if [ "${{ matrix.mode }}" == "server" ]; then
echo "=== llama-stack logs ==="
docker compose logs llama-stack
echo ""
echo "=== lightspeed-stack logs ==="
docker compose logs lightspeed-stack
else
echo "=== lightspeed-stack (library mode) logs ==="
docker compose -f docker-compose-library.yaml logs lightspeed-stack
fi
4 changes: 4 additions & 0 deletions Containerfile
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,10 @@ USER root
# Additional tools for derived images
RUN microdnf install -y --nodocs --setopt=keepcache=0 --setopt=tsflags=nodocs jq patch

# Create llama-stack directories for library mode
RUN mkdir -p /opt/app-root/src/.llama/distributions/ollama /opt/app-root/src/.llama/providers.d && \
chown -R 1001:1001 /opt/app-root/src/.llama

# Add executables from .venv to system PATH
ENV PATH="/app-root/.venv/bin:$PATH"

Expand Down
4 changes: 4 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,10 @@ test-integration: ## Run integration tests tests
test-e2e: ## Run end to end tests for the service
script -q -e -c "uv run behave --color --format pretty --tags=-skip -D dump_errors=true @tests/e2e/test_list.txt"

test-e2e-local: ## Run end to end tests for the service locally (without the TTY script wrapper)
uv run behave --color --format pretty --tags=-skip -D dump_errors=true @tests/e2e/test_list.txt


check-types: ## Checks type hints in sources
uv run mypy --explicit-package-bases --disallow-untyped-calls --disallow-untyped-defs --disallow-incomplete-defs --ignore-missing-imports --disable-error-code attr-defined src/ tests/unit tests/integration tests/e2e/

Expand Down
37 changes: 37 additions & 0 deletions docker-compose-library.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
services:
  # Lightspeed Stack with embedded llama-stack (library mode)
  lightspeed-stack:
    build:
      context: .
      dockerfile: Containerfile
    platform: linux/amd64
    container_name: lightspeed-stack
    ports:
      - "8080:8080"
    volumes:
      # Mount both config files - lightspeed-stack.yaml should have library mode enabled
      - ./lightspeed-stack.yaml:/app-root/lightspeed-stack.yaml:Z
      - ./run.yaml:/app-root/run.yaml:Z
    environment:
      # LLM Provider API Keys
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      - E2E_OPENAI_MODEL=${E2E_OPENAI_MODEL:-gpt-4-turbo}
      - AZURE_API_KEY=${AZURE_API_KEY:-}
      - BRAVE_SEARCH_API_KEY=${BRAVE_SEARCH_API_KEY:-}
      - TAVILY_SEARCH_API_KEY=${TAVILY_SEARCH_API_KEY:-}
      - RHAIIS_URL=${RHAIIS_URL:-}
      - RHAIIS_API_KEY=${RHAIIS_API_KEY:-}
      - RHAIIS_MODEL=${RHAIIS_MODEL:-}
      - RHEL_AI_URL=${RHEL_AI_URL:-}
      - RHEL_AI_PORT=${RHEL_AI_PORT:-}
      - RHEL_AI_API_KEY=${RHEL_AI_API_KEY:-}
      - RHEL_AI_MODEL=${RHEL_AI_MODEL:-}
      # Enable debug logging if needed
      - LLAMA_STACK_LOGGING=${LLAMA_STACK_LOGGING:-}
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/liveness"]
      interval: 10s  # how often to run the check
      timeout: 5s  # how long to wait before considering it failed
      retries: 3  # how many times to retry before marking as unhealthy
      start_period: 15s  # time to wait before starting checks (increased for library initialization)

2 changes: 1 addition & 1 deletion run.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ apis:
benchmarks: []
container_image: null
datasets: []
external_providers_dir: null
external_providers_dir: /opt/app-root/src/.llama/providers.d
inference_store:
db_path: .llama/distributions/ollama/inference_store.db
type: sqlite
Expand Down
6 changes: 4 additions & 2 deletions test.containerfile
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
# Custom Red Hat llama-stack image with missing dependencies
FROM quay.io/opendatahub/llama-stack:rhoai-v2.25-latest

# Install missing dependencies
# Install missing dependencies and create required directories
USER root
RUN pip install faiss-cpu==1.11.0 && \
mkdir -p /app-root && \
chown -R 1001:0 /app-root && \
chmod -R 775 /app-root
chmod -R 775 /app-root && \
mkdir -p /opt/app-root/src/.llama/distributions/ollama /opt/app-root/src/.llama/providers.d && \
chown -R 1001:0 /opt/app-root/src/.llama

# Switch back to the original user
USER 1001
2 changes: 1 addition & 1 deletion tests/e2e/configs/run-azure.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ apis:
benchmarks: []
container_image: null
datasets: []
external_providers_dir: null
external_providers_dir: /opt/app-root/src/.llama/providers.d
inference_store:
db_path: .llama/distributions/ollama/inference_store.db
type: sqlite
Expand Down
2 changes: 1 addition & 1 deletion tests/e2e/configs/run-ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ apis:
benchmarks: []
container_image: null
datasets: []
external_providers_dir: null
external_providers_dir: /opt/app-root/src/.llama/providers.d
inference_store:
db_path: .llama/distributions/ollama/inference_store.db
type: sqlite
Expand Down
2 changes: 1 addition & 1 deletion tests/e2e/configs/run-rhelai.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ apis:
benchmarks: []
container_image: null
datasets: []
external_providers_dir: null
external_providers_dir: /opt/app-root/src/.llama/providers.d
inference_store:
db_path: .llama/distributions/ollama/inference_store.db
type: sqlite
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
name: Lightspeed Core Service (LCS)
service:
  host: 0.0.0.0
  port: 8080
  auth_enabled: false
  workers: 1
  color_log: true
  access_log: true
llama_stack:
  use_as_library_client: true
  library_client_config_path: run.yaml
user_data_collection:
  feedback_enabled: true
  feedback_storage: "/tmp/data/feedback"
  transcripts_enabled: true
  transcripts_storage: "/tmp/data/transcripts"

authentication:
  module: "noop-with-token"
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
name: Lightspeed Core Service (LCS)
service:
  host: 0.0.0.0
  port: 8080
  auth_enabled: false
  workers: 1
  color_log: true
  access_log: true
llama_stack:
  use_as_library_client: true
  library_client_config_path: run.yaml
user_data_collection:
  feedback_enabled: true
  # NOTE(review): deliberately invalid path — presumably a negative-test fixture; confirm
  feedback_storage: "/invalid"
  transcripts_enabled: true
  transcripts_storage: "/tmp/data/transcripts"

authentication:
  module: "noop-with-token"
20 changes: 20 additions & 0 deletions tests/e2e/configuration/lightspeed-stack-library-mode.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
name: Lightspeed Core Service (LCS)
service:
  host: 0.0.0.0
  port: 8080
  auth_enabled: false
  workers: 1
  color_log: true
  access_log: true
llama_stack:
  # Library mode - embeds llama-stack as library
  use_as_library_client: true
  library_client_config_path: run.yaml
user_data_collection:
  feedback_enabled: true
  feedback_storage: "/tmp/data/feedback"
  transcripts_enabled: true
  transcripts_storage: "/tmp/data/transcripts"
authentication:
  module: "noop"

Loading
Loading