73 changes: 48 additions & 25 deletions .github/workflows/ci.yaml
@@ -9,47 +9,70 @@ on:
       - main
 
 jobs:
-  lint:
+  format:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v5
 
-      - name: "Set up Python"
-        uses: actions/setup-python@v6
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
         with:
-          python-version-file: ".python-version"
+          node-version: "20"
+          cache: "npm"
 
-      - name: Install uv
-        uses: astral-sh/setup-uv@v6
-        with:
-          enable-cache: true
+      - name: Install dependencies
+        run: npm ci
 
-      - name: Sync dependencies
-        run: uv sync --locked --all-extras --dev
+      - name: Run format check
+        run: npm run format:check
 
-      - name: Run ruff format check
-        run: uv run ruff format --check .
+  type-check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v5
 
-      - name: Run ruff check
-        run: uv run ruff check --output-format=github .
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: "20"
+          cache: "npm"
 
-  type-check:
+      - name: Install dependencies
+        run: npm ci
+
+      - name: Run type checking
+        run: npm run typecheck
+
+  test:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v5
 
-      - name: "Set up Python"
-        uses: actions/setup-python@v6
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
        with:
-          python-version-file: ".python-version"
+          node-version: "20"
+          cache: "npm"
 
+      - name: Install dependencies
+        run: npm ci
+
+      - name: Run tests
+        run: npm run test -- --run
+
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v5
+
-      - name: Install uv
-        uses: astral-sh/setup-uv@v6
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
         with:
-          enable-cache: true
+          node-version: "20"
+          cache: "npm"
 
-      - name: Sync dependencies
-        run: uv sync --locked --all-extras --dev
+      - name: Install dependencies
+        run: npm ci
 
-      - name: Run type checking
-        run: uv run ty check --output-format github
+      - name: Build
+        run: npm run build
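Each of the new jobs shells out to an npm script rather than invoking a tool directly. A minimal sketch of the `scripts` block the workflow appears to rely on — the script names come from the diff above, but the commands behind them (Prettier, tsc, Vitest) are assumptions, not taken from this PR:

```json
{
  "scripts": {
    "format:check": "prettier --check .",
    "typecheck": "tsc --noEmit",
    "test": "vitest",
    "build": "tsc -p tsconfig.json"
  }
}
```

The `--run` flag forwarded by `npm run test -- --run` matches Vitest's single-pass (non-watch) mode, which is why Vitest is the guess here.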
4 changes: 2 additions & 2 deletions docs/extending/full-customization.md
@@ -22,7 +22,7 @@ For most use cases, you don't need full customization:
 ### 1. Clone the Repository
 
 ```bash
-git clone https://github.com/ArtificialAnalysis/stirrup-js.git
+git clone https://github.com/ArtificialAnalysis/stirrupJS.git
 cd stirrup-js
 ```
 
@@ -402,7 +402,7 @@ See `CONTRIBUTING.md` in the repository.
 
 Check out these projects using StirrupJS:
 
-- [StirrupJS](https://github.com/ArtificialAnalysis/stirrup-js) - The main repository
+- [StirrupJS](https://github.com/ArtificialAnalysis/stirrupJS) - The main repository
 - [Stirrup](https://github.com/ArtificialAnalysis/Stirrup) - Python version
 
 ## Best Practices
2 changes: 1 addition & 1 deletion docs/index.md
@@ -164,7 +164,7 @@ For deep customization of the framework internals, you can clone and modify Stir
 
 ```bash
 # Clone the repository
-git clone https://github.com/ArtificialAnalysis/stirrup-js.git
+git clone https://github.com/ArtificialAnalysis/stirrupJS.git
 cd stirrup-js
 
 # Install dependencies
4 changes: 2 additions & 2 deletions mkdocs.yml
@@ -2,8 +2,8 @@ site_name: StirrupJS Documentation
 site_url: https://stirrupjs.artificialanalysis.ai
 site_description: Lightweight TypeScript/JavaScript framework for building AI agents
 site_author: Artificial Analysis, Inc.
-repo_url: https://github.com/ArtificialAnalysis/stirrup-js
-repo_name: stirrup-js
+repo_url: https://github.com/ArtificialAnalysis/stirrupJS
+repo_name: stirrupJS
 
 theme:
   name: material
9 changes: 1 addition & 8 deletions src/clients/anthropic-client.ts
@@ -4,14 +4,7 @@
 
 import Anthropic from '@anthropic-ai/sdk';
 import retry from 'async-retry';
-import type {
-  LLMClient,
-  ChatMessage,
-  AssistantMessage,
-  Tool,
-  ToolCall,
-  TokenUsage,
-} from '../core/models.js';
+import type { LLMClient, ChatMessage, AssistantMessage, Tool, ToolCall, TokenUsage } from '../core/models.js';
 import { ContextOverflowError } from '../core/models.js';
 import { toAnthropicMessages, toAnthropicTools } from './utils.js';
 import { MAX_RETRY_ATTEMPTS, RETRY_MIN_TIMEOUT, RETRY_MAX_TIMEOUT } from '../constants.js';
11 changes: 2 additions & 9 deletions src/clients/openai-client.ts
@@ -5,14 +5,7 @@
 
 import OpenAI from 'openai';
 import retry from 'async-retry';
-import type {
-  LLMClient,
-  ChatMessage,
-  AssistantMessage,
-  Tool,
-  ToolCall,
-  TokenUsage,
-} from '../core/models.js';
+import type { LLMClient, ChatMessage, AssistantMessage, Tool, ToolCall, TokenUsage } from '../core/models.js';
 import { ContextOverflowError } from '../core/models.js';
 import { toOpenAIMessages, toOpenAITools } from './utils.js';
 import { MAX_RETRY_ATTEMPTS, RETRY_MIN_TIMEOUT, RETRY_MAX_TIMEOUT } from '../constants.js';
@@ -177,7 +170,7 @@ export class ChatCompletionsClient implements LLMClient {
       input: response.usage.prompt_tokens,
       output: response.usage.completion_tokens,
       reasoning: this.config.includeReasoningTokens
-        ? usageWithDetails.completion_tokens_details?.reasoning_tokens ?? 0
+        ? (usageWithDetails.completion_tokens_details?.reasoning_tokens ?? 0)
         : 0,
     };
   }
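The added parentheses around the nullish coalescing expression do not change behavior: the consequent of a conditional already evaluates `a ?? 0` as a unit. A standalone sketch of the same pattern, using hypothetical values rather than the SDK's real response object:

```ts
// Hypothetical stand-ins for the SDK's usage details.
const details: { reasoning_tokens?: number } | undefined = undefined;
const includeReasoningTokens = true;

// With or without the parentheses, the consequent evaluates `details?.reasoning_tokens ?? 0`.
const reasoning = includeReasoningTokens ? (details?.reasoning_tokens ?? 0) : 0;
console.log(reasoning); // 0 — the optional chain yields undefined, so ?? falls back to 0
```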
11 changes: 3 additions & 8 deletions src/clients/utils.ts
@@ -252,8 +252,7 @@ export function toAnthropicMessages(messages: ChatMessage[]): {
 
     // Add text content
     if (message.content) {
-      const textContent =
-        typeof message.content === 'string' ? message.content : JSON.stringify(message.content);
+      const textContent = typeof message.content === 'string' ? message.content : JSON.stringify(message.content);
       if (textContent) {
         (result.content as unknown[]).push({
           type: 'text',
@@ -325,9 +324,7 @@ export function toOpenAITools(tools: Map<string, Tool>): unknown[] {
     for (const [key, value] of Object.entries(shape)) {
       properties[key] = zodToJsonSchema(value);
       const isOptionalFn = (value as { isOptional?: unknown }).isOptional;
-      const isOptional = typeof isOptionalFn === 'function'
-        ? (isOptionalFn as () => boolean)()
-        : false;
+      const isOptional = typeof isOptionalFn === 'function' ? (isOptionalFn as () => boolean)() : false;
       if (!isOptional) required.push(key);
     }
@@ -372,9 +369,7 @@ export function toAnthropicTools(tools: Map<string, Tool>): unknown[] {
     for (const [key, value] of Object.entries(shape)) {
       properties[key] = zodToJsonSchema(value);
       const isOptionalFn = (value as { isOptional?: unknown }).isOptional;
-      const isOptional = typeof isOptionalFn === 'function'
-        ? (isOptionalFn as () => boolean)()
-        : false;
+      const isOptional = typeof isOptionalFn === 'function' ? (isOptionalFn as () => boolean)() : false;
       if (!isOptional) required.push(key);
     }
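Both hunks collapse the same required-field computation onto one line. As a standalone illustration of the pattern — the `shape` below is a hypothetical tool schema, not from this repository — Zod schemas expose an `isOptional()` method, and the guarded call defends against non-Zod values slipping into the shape:

```ts
import { z } from 'zod';

// Hypothetical parameter shape for a tool definition.
const shape = {
  query: z.string(),
  limit: z.number().optional(),
};

const required: string[] = [];
for (const [key, value] of Object.entries(shape)) {
  // Only call isOptional() if the value actually exposes it as a function.
  const isOptionalFn = (value as { isOptional?: unknown }).isOptional;
  const isOptional = typeof isOptionalFn === 'function' ? (isOptionalFn as () => boolean)() : false;
  if (!isOptional) required.push(key);
}

console.log(required); // ["query"] — limit is optional, so it is excluded
```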
9 changes: 1 addition & 8 deletions src/clients/vercel-ai-gateway-client.ts
@@ -7,14 +7,7 @@ import type { LanguageModel, ModelMessage, ToolSet } from 'ai';
 import { generateText } from 'ai';
 import retry from 'async-retry';
 import { MAX_RETRY_ATTEMPTS, RETRY_MAX_TIMEOUT, RETRY_MIN_TIMEOUT } from '../constants.js';
-import type {
-  AssistantMessage,
-  ChatMessage,
-  LLMClient,
-  TokenUsage,
-  Tool,
-  ToolCall,
-} from '../core/models.js';
+import type { AssistantMessage, ChatMessage, LLMClient, TokenUsage, Tool, ToolCall } from '../core/models.js';
 import { ContextOverflowError } from '../core/models.js';
 
 export interface VercelAIClientConfig {
6 changes: 1 addition & 5 deletions src/content/processors.ts
@@ -12,11 +12,7 @@ import { fileTypeFromBuffer } from 'file-type';
  * @param maxPixels Maximum total pixels
  * @returns Adjusted [width, height] with even dimensions
  */
-export function calculateDownscaledDimensions(
-  width: number,
-  height: number,
-  maxPixels: number
-): [number, number] {
+export function calculateDownscaledDimensions(width: number, height: number, maxPixels: number): [number, number] {
   const currentPixels = width * height;
 
   if (currentPixels <= maxPixels) {
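The body below the reformatted signature is collapsed in this view. As a rough sketch of the contract described by the JSDoc — shrink to at most `maxPixels` while preserving aspect ratio and keeping both dimensions even — here is a hypothetical re-implementation, an assumption rather than the repository's actual code:

```ts
// Hypothetical re-implementation for illustration only.
function downscaleToMaxPixels(width: number, height: number, maxPixels: number): [number, number] {
  const currentPixels = width * height;
  if (currentPixels <= maxPixels) {
    return [width, height];
  }
  // Shrink both sides by the same factor so width * height <= maxPixels.
  const scale = Math.sqrt(maxPixels / currentPixels);
  // Round down to the nearest even number, with a floor of 2 pixels per side.
  const even = (n: number) => Math.max(2, Math.floor(n / 2) * 2);
  return [even(width * scale), even(height * scale)];
}

console.log(downscaleToMaxPixels(4032, 3024, 1_000_000)); // [1154, 866] — even dimensions, under the 1,000,000-pixel budget
```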