diff --git a/docs/api/ai-client.md b/docs/api/ai-client.md
index 6257e442..52b17743 100644
--- a/docs/api/ai-client.md
+++ b/docs/api/ai-client.md
@@ -13,8 +13,7 @@ npm install @tanstack/ai-client
The main client class for managing chat state.
```typescript
-import { ChatClient } from "@tanstack/ai-client";
-import { fetchServerSentEvents } from "@tanstack/ai-client";
+import { ChatClient, fetchServerSentEvents } from "@tanstack/ai-client";
const client = new ChatClient({
connection: fetchServerSentEvents("/api/chat"),
@@ -268,7 +267,7 @@ type ToolResultState =
Configure stream processing with chunk strategies:
```typescript
-import { ImmediateStrategy } from "@tanstack/ai-client";
+import { ImmediateStrategy, fetchServerSentEvents } from "@tanstack/ai-client";
const client = new ChatClient({
connection: fetchServerSentEvents("/api/chat"),
diff --git a/docs/api/ai-solid.md b/docs/api/ai-solid.md
new file mode 100644
index 00000000..e3e1c717
--- /dev/null
+++ b/docs/api/ai-solid.md
@@ -0,0 +1,259 @@
+# @tanstack/ai-solid API
+
+SolidJS primitives for TanStack AI, providing convenient bindings for the headless chat client.
+
+## Installation
+
+```bash
+npm install @tanstack/ai-solid
+```
+
+## `useChat(options?)`
+
+Main primitive for managing chat state in SolidJS.
+
+```typescript
+import { useChat, fetchServerSentEvents } from "@tanstack/ai-solid";
+
+function ChatComponent() {
+ const { messages, sendMessage, isLoading, error, addToolApprovalResponse } =
+ useChat({
+ connection: fetchServerSentEvents("/api/chat"),
+ initialMessages: [],
+ onToolCall: async ({ toolName, input }) => {
+ // Handle client tool execution
+ return { result: "..." };
+ },
+ });
+
+  return <div>{/* Chat UI */}</div>;
+}
+```
+
+### Options
+
+Extends `ChatClientOptions` but omits state change callbacks, which are handled by SolidJS signals (a sketch combining the callback options follows this list):
+
+- `connection` - Connection adapter (required)
+- `initialMessages?` - Initial messages array
+- `id?` - Unique identifier for this chat instance
+- `body?` - Additional body parameters to send
+- `onResponse?` - Callback when response is received
+- `onChunk?` - Callback when stream chunk is received
+- `onFinish?` - Callback when response finishes
+- `onError?` - Callback when error occurs
+- `onToolCall?` - Callback for client-side tool execution
+- `streamProcessor?` - Stream processing configuration
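+
+For example, the request and lifecycle options can be combined as sketched below (the `body` fields are illustrative, and the exact callback arguments may differ slightly):
+
+```typescript
+import { useChat, fetchServerSentEvents } from "@tanstack/ai-solid";
+
+function Chat() {
+  const { messages, sendMessage } = useChat({
+    connection: fetchServerSentEvents("/api/chat"),
+    body: { model: "gpt-4o" }, // illustrative extra request parameters
+    onChunk: (chunk) => console.debug("chunk", chunk),
+    onFinish: () => console.log("response finished"),
+    onError: (error) => console.error("chat error", error),
+  });
+
+  // Render messages() and call sendMessage(...) as in the examples below
+  return null;
+}
+```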
+
+### Returns
+
+```typescript
+interface UseChatReturn {
+  messages: Accessor<Array<UIMessage>>;
+  sendMessage: (content: string) => Promise<void>;
+  append: (message: ModelMessage | UIMessage) => Promise<void>;
+  addToolResult: (result: {
+    toolCallId: string;
+    tool: string;
+    output: any;
+    state?: "output-available" | "output-error";
+    errorText?: string;
+  }) => Promise<void>;
+  addToolApprovalResponse: (response: {
+    id: string;
+    approved: boolean;
+  }) => Promise<void>;
+  reload: () => Promise<void>;
+  stop: () => void;
+  isLoading: Accessor<boolean>;
+  error: Accessor<Error | undefined>;
+ setMessages: (messages: UIMessage[]) => void;
+ clear: () => void;
+}
+```
+
+**Note:** Unlike the React hook, `messages`, `isLoading`, and `error` are SolidJS `Accessor` functions, so you need to call them to read their values (e.g., `messages()` instead of `messages`).
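+
+For instance (a minimal sketch reusing the options from the example above):
+
+```typescript
+import { useChat, fetchServerSentEvents } from "@tanstack/ai-solid";
+
+function ChatStatus() {
+  const { messages, isLoading, error } = useChat({
+    connection: fetchServerSentEvents("/api/chat"),
+  });
+
+  // Call the accessors (messages(), isLoading(), error()) so SolidJS can track them
+  return (
+    <div>
+      <span>{isLoading() ? "Streaming…" : "Idle"}</span>
+      <span>{messages().length} messages</span>
+      {error() && <span>Error: {error()!.message}</span>}
+    </div>
+  );
+}
+```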
+
+## Connection Adapters
+
+Re-exported from `@tanstack/ai-client` for convenience:
+
+```typescript
+import {
+ fetchServerSentEvents,
+ fetchHttpStream,
+ stream,
+ type ConnectionAdapter,
+} from "@tanstack/ai-solid";
+```
+
+## Example: Basic Chat
+
+```typescript
+import { createSignal, For } from "solid-js";
+import { useChat, fetchServerSentEvents } from "@tanstack/ai-solid";
+
+export function Chat() {
+ const [input, setInput] = createSignal("");
+
+ const { messages, sendMessage, isLoading } = useChat({
+ connection: fetchServerSentEvents("/api/chat"),
+ });
+
+ const handleSubmit = (e: Event) => {
+ e.preventDefault();
+ if (input().trim() && !isLoading()) {
+ sendMessage(input());
+ setInput("");
+ }
+ };
+
+ return (
+    <div>
+      <For each={messages()}>
+        {(message) => (
+          <div>
+            <strong>{message.role}:</strong>
+            <For each={message.parts}>
+              {(part) => {
+                if (part.type === "thinking") {
+                  return (
+                    <div>
+                      💠 Thinking: {part.content}
+                    </div>
+                  );
+                }
+                if (part.type === "text") {
+                  return <span>{part.content}</span>;
+                }
+                return null;
+              }}
+            </For>
+          </div>
+        )}
+      </For>
+
+      <form onSubmit={handleSubmit}>
+        <input
+          value={input()}
+          onInput={(e) => setInput(e.currentTarget.value)}
+          placeholder="Say something..."
+        />
+        <button type="submit" disabled={isLoading()}>
+          Send
+        </button>
+      </form>
+    </div>
+ );
+}
+```
+
+## Example: Tool Approval
+
+```typescript
+import { For, Show } from "solid-js";
+import { useChat, fetchServerSentEvents } from "@tanstack/ai-solid";
+
+export function ChatWithApproval() {
+ const { messages, sendMessage, addToolApprovalResponse } = useChat({
+ connection: fetchServerSentEvents("/api/chat"),
+ });
+
+ return (
+    <div>
+      <For each={messages()}>
+        {(message) => (
+          <For each={message.parts}>
+            {(part) => (
+              <Show when={part.type === "approval-requested"}>
+                <div>
+                  <p>Approve: {part.name}</p>
+                  <button
+                    onClick={() =>
+                      addToolApprovalResponse({ id: part.id, approved: true })
+                    }
+                  >
+                    Approve
+                  </button>
+                  <button
+                    onClick={() =>
+                      addToolApprovalResponse({ id: part.id, approved: false })
+                    }
+                  >
+                    Deny
+                  </button>
+                </div>
+              </Show>
+            )}
+          </For>
+        )}
+      </For>
+    </div>
+ );
+}
+```
+
+## Example: Client Tools
+
+```typescript
+import { useChat, fetchServerSentEvents } from "@tanstack/ai-solid";
+import { createSignal } from "solid-js";
+
+export function ChatWithClientTools() {
+ const [notification, setNotification] = createSignal("");
+
+ const { messages, sendMessage } = useChat({
+ connection: fetchServerSentEvents("/api/chat"),
+ onToolCall: async ({ toolName, input }) => {
+ switch (toolName) {
+ case "updateUI":
+ // Update SolidJS state
+ setNotification(input.message);
+ return { success: true };
+
+ case "saveToLocalStorage":
+ localStorage.setItem(input.key, input.value);
+ return { saved: true };
+
+ default:
+ throw new Error(`Unknown tool: ${toolName}`);
+ }
+ },
+ });
+
+ // ... rest of component
+}
+```
+
+## Types
+
+All types are re-exported from `@tanstack/ai-client` (see the import sketch after this list):
+
+- `UIMessage`
+- `MessagePart`
+- `TextPart`
+- `ThinkingPart`
+- `ToolCallPart`
+- `ToolResultPart`
+- `ChatClientOptions`
+- `ConnectionAdapter`
+- `ChatRequestBody`
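+
+They can be imported directly from this package, for example:
+
+```typescript
+import type {
+  UIMessage,
+  MessagePart,
+  ConnectionAdapter,
+} from "@tanstack/ai-solid";
+```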
+
+## Next Steps
+
+- [Getting Started](../getting-started/quick-start) - Learn the basics
+- [Tools Guide](../guides/tools) - Learn about tools
+- [Client Tools](../guides/client-tools) - Learn about client-side tools
diff --git a/docs/api/ai.md b/docs/api/ai.md
index 1ca11bf0..ffd646eb 100644
--- a/docs/api/ai.md
+++ b/docs/api/ai.md
@@ -112,7 +112,7 @@ const myTool = tool({
// Tool implementation
return { result: "..." };
},
- requiresApproval: false, // Optional
+ needsApproval: false, // Optional
});
```
@@ -121,15 +121,43 @@ const myTool = tool({
- `description` - Tool description for the model
- `inputSchema` - Zod schema for input validation
- `execute` - Async function to execute the tool
-- `requiresApproval?` - Whether tool requires user approval
+- `needsApproval?` - Whether tool requires user approval
### Returns
A `Tool` object.
-## `toStreamResponse(stream)`
+## `toServerSentEventsStream(stream, abortController?)`
-Converts a stream to an HTTP Response.
+Converts a stream to a ReadableStream in Server-Sent Events format.
+
+```typescript
+import { toServerSentEventsStream, chat } from "@tanstack/ai";
+import { openai } from "@tanstack/ai-openai";
+
+const stream = chat({
+ adapter: openai(),
+ messages: [...],
+ model: "gpt-4o",
+});
+const readableStream = toServerSentEventsStream(stream);
+```
+
+### Parameters
+
+- `stream` - Async iterable of `StreamChunk`
+- `abortController?` - Optional `AbortController` that is aborted when the stream is cancelled
+
+### Returns
+
+A `ReadableStream` in Server-Sent Events format (a sketch of wrapping it in a `Response` follows this list). Each chunk is:
+- Prefixed with `"data: "`
+- Followed by `"\n\n"`
+- Stream ends with `"data: [DONE]\n\n"`
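+
+If you need to build the HTTP response yourself rather than use `toStreamResponse()`, a minimal sketch looks like this (the header values mirror the SSE headers described below):
+
+```typescript
+import { toServerSentEventsStream, chat } from "@tanstack/ai";
+import { openai } from "@tanstack/ai-openai";
+
+export async function POST(request: Request) {
+  const { messages } = await request.json();
+
+  const stream = chat({ adapter: openai(), messages, model: "gpt-4o" });
+
+  // Wrap the SSE-formatted stream in a Response with the usual SSE headers
+  return new Response(toServerSentEventsStream(stream), {
+    headers: {
+      "Content-Type": "text/event-stream",
+      "Cache-Control": "no-cache",
+      Connection: "keep-alive",
+    },
+  });
+}
+```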
+
+## `toStreamResponse(stream, init?)`
+
+Converts a stream to an HTTP Response with proper SSE headers.
```typescript
import { toStreamResponse, chat } from "@tanstack/ai";
@@ -146,10 +174,11 @@ return toStreamResponse(stream);
### Parameters
- `stream` - Async iterable of `StreamChunk`
+- `init?` - Optional ResponseInit options (including `abortController`)
### Returns
-A `Response` object suitable for HTTP endpoints.
+A `Response` object suitable for HTTP endpoints with SSE headers (`Content-Type: text/event-stream`, `Cache-Control: no-cache`, `Connection: keep-alive`).
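+
+For example, passing an `AbortController` lets the route react when the client cancels the stream (a minimal sketch; the cleanup here is just a log):
+
+```typescript
+import { toStreamResponse, chat } from "@tanstack/ai";
+import { openai } from "@tanstack/ai-openai";
+
+export async function POST(request: Request) {
+  const { messages } = await request.json();
+
+  const abortController = new AbortController();
+  // Aborted when the client cancels the stream (see the parameter list above)
+  abortController.signal.addEventListener("abort", () => {
+    console.log("stream cancelled by client");
+  });
+
+  const stream = chat({ adapter: openai(), messages, model: "gpt-4o" });
+
+  return toStreamResponse(stream, { abortController });
+}
+```
+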
## `maxIterations(count)`
@@ -228,7 +257,7 @@ interface Tool {
    parameters: Record<string, unknown>;
};
  execute?: (args: any) => Promise<any> | any;
- requiresApproval?: boolean;
+ needsApproval?: boolean;
}
```
diff --git a/docs/guides/client-tools.md b/docs/guides/client-tools.md
index 10dbabdc..f52750f1 100644
--- a/docs/guides/client-tools.md
+++ b/docs/guides/client-tools.md
@@ -40,7 +40,7 @@ export const saveToLocalStorage = tool({
Pass client tools to the chat (they won't execute on the server):
```typescript
-import { chat, toStreamResponse } from "@tanstack/ai";
+import { chat, toServerSentEventsStream } from "@tanstack/ai";
import { openai } from "@tanstack/ai-openai";
import { updateUI, saveToLocalStorage } from "./tools";
@@ -54,7 +54,7 @@ export async function POST(request: Request) {
tools: [updateUI, saveToLocalStorage], // Model knows about these tools
});
- return toStreamResponse(stream);
+ return toServerSentEventsStream(stream);
}
```
diff --git a/docs/guides/connection-adapters.md b/docs/guides/connection-adapters.md
index b8d21375..2cb2eafa 100644
--- a/docs/guides/connection-adapters.md
+++ b/docs/guides/connection-adapters.md
@@ -6,10 +6,10 @@ Connection adapters handle the communication between your client and server for
### Server-Sent Events (SSE)
-SSE is the recommended adapter for most use cases. It provides reliable streaming with automatic reconnection:
+SSE is the recommended adapter for most use cases. It provides reliable streaming with automatic reconnection. On the server side, use [`toServerSentEventsStream()`](../api/ai#toserversenteventsstreamstream-abortcontroller) or [`toStreamResponse()`](../api/ai#tostreamresponsestream-init) to convert your chat stream to SSE format.
```typescript
-import { fetchServerSentEvents } from "@tanstack/ai-react";
+import { useChat, fetchServerSentEvents } from "@tanstack/ai-react";
const { messages } = useChat({
connection: fetchServerSentEvents("/api/chat"),
@@ -19,12 +19,28 @@ const { messages } = useChat({
**Options:**
```typescript
-fetchServerSentEvents("/api/chat", {
- headers: {
- Authorization: "Bearer token",
- },
- method: "POST",
-})
+const { messages } = useChat({
+ connection: fetchServerSentEvents("/api/chat", {
+ headers: {
+ Authorization: "Bearer token",
+ },
+ }),
+});
+```
+
+**Dynamic values:**
+
+You can use functions for dynamic URLs or options that are evaluated on each request:
+
+```typescript
+const { messages } = useChat({
+ connection: fetchServerSentEvents(
+ () => `/api/chat?user=${currentUserId}`,
+ () => ({
+ headers: { Authorization: `Bearer ${getToken()}` },
+ })
+ ),
+});
```
### HTTP Stream
@@ -32,7 +48,7 @@ fetchServerSentEvents("/api/chat", {
For environments that don't support SSE:
```typescript
-import { fetchHttpStream } from "@tanstack/ai-react";
+import { useChat, fetchHttpStream } from "@tanstack/ai-react";
const { messages } = useChat({
connection: fetchHttpStream("/api/chat"),
@@ -160,7 +176,7 @@ const adapter = stream(async (messages, data, signal) => {
Add authentication headers to adapters:
```typescript
-import { fetchServerSentEvents } from "@tanstack/ai-react";
+import { useChat, fetchServerSentEvents } from "@tanstack/ai-react";
const { messages } = useChat({
connection: fetchServerSentEvents("/api/chat", {
@@ -171,6 +187,18 @@ const { messages } = useChat({
});
```
+For dynamic tokens, use a function:
+
+```typescript
+const { messages } = useChat({
+ connection: fetchServerSentEvents("/api/chat", () => ({
+ headers: {
+ Authorization: `Bearer ${getToken()}`,
+ },
+ })),
+});
+```
+
## Best Practices
1. **Use SSE for most cases** - It's reliable and well-supported
diff --git a/docs/guides/streaming.md b/docs/guides/streaming.md
index 6fd85d82..f4a57d49 100644
--- a/docs/guides/streaming.md
+++ b/docs/guides/streaming.md
@@ -93,7 +93,7 @@ TanStack AI provides connection adapters for different streaming protocols:
### Server-Sent Events (SSE)
```typescript
-import { fetchServerSentEvents } from "@tanstack/ai-react";
+import { useChat, fetchServerSentEvents } from "@tanstack/ai-react";
const { messages } = useChat({
connection: fetchServerSentEvents("/api/chat"),
@@ -103,7 +103,7 @@ const { messages } = useChat({
### HTTP Stream
```typescript
-import { fetchHttpStream } from "@tanstack/ai-react";
+import { useChat, fetchHttpStream } from "@tanstack/ai-react";
const { messages } = useChat({
connection: fetchHttpStream("/api/chat"),
diff --git a/examples/ts-react-chat/src/routes/index.tsx b/examples/ts-react-chat/src/routes/index.tsx
index d8c13e74..996bfef8 100644
--- a/examples/ts-react-chat/src/routes/index.tsx
+++ b/examples/ts-react-chat/src/routes/index.tsx
@@ -6,13 +6,11 @@ import rehypeRaw from 'rehype-raw'
import rehypeSanitize from 'rehype-sanitize'
import rehypeHighlight from 'rehype-highlight'
import remarkGfm from 'remark-gfm'
-import {
- useChat,
- fetchServerSentEvents,
- type UIMessage,
-} from '@tanstack/ai-react'
+import { fetchServerSentEvents, useChat } from '@tanstack/ai-react'
import { ThinkingPart } from '@tanstack/ai-react-ui'
+import type { UIMessage } from '@tanstack/ai-react'
+
import GuitarRecommendation from '@/components/example-GuitarRecommendation'
function ChatInputArea({ children }: { children: React.ReactNode }) {
@@ -330,12 +328,14 @@ function DebugPanel({
)
}
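+// Create the connection adapter once at module scope so the same instance is reused across renders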
+const connection = fetchServerSentEvents('/api/tanchat')
+
function ChatPage() {
  const [chunks, setChunks] = useState<Array<any>>([])
const { messages, sendMessage, isLoading, addToolApprovalResponse, stop } =
useChat({
- connection: fetchServerSentEvents('/api/tanchat'),
+ connection,
onChunk: (chunk: any) => {
setChunks((prev) => [...prev, chunk])
},
@@ -363,10 +363,8 @@ function ChatPage() {
guitarId: input.guitarId,
totalItems: wishList.length,
}
-
- default:
- throw new Error(`Unknown client tool: ${toolName}`)
}
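+      // Unknown tools resolve with a placeholder result instead of throwing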
+ return Promise.resolve({ result: 'Unknown client tool' })
},
})
const [input, setInput] = useState('')
diff --git a/packages/typescript/ai-client/src/connection-adapters.ts b/packages/typescript/ai-client/src/connection-adapters.ts
index 7bbb709d..d0a03ff3 100644
--- a/packages/typescript/ai-client/src/connection-adapters.ts
+++ b/packages/typescript/ai-client/src/connection-adapters.ts
@@ -93,38 +93,53 @@ export interface FetchConnectionOptions {
/**
* Create a Server-Sent Events connection adapter
*
- * @param url - The API endpoint URL
- * @param options - Fetch options (headers, credentials, etc.)
+ * @param url - The API endpoint URL (or a function that returns the URL)
+ * @param options - Fetch options (headers, credentials, etc.) or a function that returns options
* @returns A connection adapter for SSE streams
*
* @example
* ```typescript
+ * // Static URL
+ * const connection = fetchServerSentEvents('/api/chat');
+ *
+ * // Dynamic URL
+ * const connection = fetchServerSentEvents(() => `/api/chat?user=${userId}`);
+ *
+ * // With options
* const connection = fetchServerSentEvents('/api/chat', {
* headers: { 'Authorization': 'Bearer token' }
* });
*
- * const client = new ChatClient({ connection });
+ * // With dynamic options
+ * const connection = fetchServerSentEvents('/api/chat', () => ({
+ * headers: { 'Authorization': `Bearer ${getToken()}` }
+ * }));
* ```
*/
export function fetchServerSentEvents(
- url: string,
- options: FetchConnectionOptions = {},
+ url: string | (() => string),
+ options: FetchConnectionOptions | (() => FetchConnectionOptions) = {},
): ConnectionAdapter {
return {
async *connect(messages, data, abortSignal) {
+ // Resolve URL and options if they are functions
+ const resolvedUrl = typeof url === 'function' ? url() : url
+ const resolvedOptions =
+ typeof options === 'function' ? options() : options
+
const modelMessages = convertMessagesToModelMessages(messages)
      const requestHeaders: Record<string, string> = {
'Content-Type': 'application/json',
- ...mergeHeaders(options.headers),
+ ...mergeHeaders(resolvedOptions.headers),
}
- const response = await fetch(url, {
+ const response = await fetch(resolvedUrl, {
method: 'POST',
headers: requestHeaders,
body: JSON.stringify({ messages: modelMessages, data }),
- credentials: options.credentials || 'same-origin',
- signal: abortSignal || options.signal,
+ credentials: resolvedOptions.credentials || 'same-origin',
+ signal: abortSignal || resolvedOptions.signal,
})
if (!response.ok) {
@@ -160,39 +175,54 @@ export function fetchServerSentEvents(
/**
* Create an HTTP streaming connection adapter (for raw streaming without SSE format)
*
- * @param url - The API endpoint URL
- * @param options - Fetch options (headers, credentials, etc.)
+ * @param url - The API endpoint URL (or a function that returns the URL)
+ * @param options - Fetch options (headers, credentials, etc.) or a function that returns options
* @returns A connection adapter for HTTP streams
*
* @example
* ```typescript
+ * // Static URL
+ * const connection = fetchHttpStream('/api/chat');
+ *
+ * // Dynamic URL
+ * const connection = fetchHttpStream(() => `/api/chat?user=${userId}`);
+ *
+ * // With options
* const connection = fetchHttpStream('/api/chat', {
* headers: { 'Authorization': 'Bearer token' }
* });
*
- * const client = new ChatClient({ connection });
+ * // With dynamic options
+ * const connection = fetchHttpStream('/api/chat', () => ({
+ * headers: { 'Authorization': `Bearer ${getToken()}` }
+ * }));
* ```
*/
export function fetchHttpStream(
- url: string,
- options: FetchConnectionOptions = {},
+ url: string | (() => string),
+ options: FetchConnectionOptions | (() => FetchConnectionOptions) = {},
): ConnectionAdapter {
return {
async *connect(messages, data, abortSignal) {
+ // Resolve URL and options if they are functions
+ const resolvedUrl = typeof url === 'function' ? url() : url
+ const resolvedOptions =
+ typeof options === 'function' ? options() : options
+
// Convert UIMessages to ModelMessages if needed
const modelMessages = convertMessagesToModelMessages(messages)
      const requestHeaders: Record<string, string> = {
'Content-Type': 'application/json',
- ...mergeHeaders(options.headers),
+ ...mergeHeaders(resolvedOptions.headers),
}
- const response = await fetch(url, {
+ const response = await fetch(resolvedUrl, {
method: 'POST',
headers: requestHeaders,
body: JSON.stringify({ messages: modelMessages, data }),
- credentials: options.credentials || 'same-origin',
- signal: abortSignal || options.signal,
+ credentials: resolvedOptions.credentials || 'same-origin',
+ signal: abortSignal || resolvedOptions.signal,
})
if (!response.ok) {
diff --git a/packages/typescript/ai-react/src/use-chat.ts b/packages/typescript/ai-react/src/use-chat.ts
index d01c4ec0..c41633ea 100644
--- a/packages/typescript/ai-react/src/use-chat.ts
+++ b/packages/typescript/ai-react/src/use-chat.ts
@@ -23,9 +23,9 @@ export function useChat(options: UseChatOptions): UseChatReturn {
}, [messages])
// Create ChatClient instance with callbacks to sync state
- // Note: Connection changes will recreate the client and reset state.
- // Body and other options are captured at client creation time.
- // To update connection/body, remount the component or use a key prop.
+ // Note: Options are captured at client creation time.
+ // The connection adapter can use functions for dynamic values (url, headers, etc.)
+ // which are evaluated lazily on each request.
const client = useMemo(() => {
// On first mount, use initialMessages. On subsequent recreations, preserve existing messages.
const messagesToUse = isFirstMountRef.current
@@ -55,9 +55,9 @@ export function useChat(options: UseChatOptions): UseChatReturn {
setError(newError)
},
})
- // Only recreate when connection changes (most critical option)
- // Other options are captured at creation time
- }, [clientId, options.connection])
+ // Only recreate when clientId changes
+ // Connection and other options are captured at creation time
+ }, [clientId])
// Sync initial messages on mount only
// Note: initialMessages are passed to ChatClient constructor, but we also
diff --git a/packages/typescript/ai-solid/src/use-chat.ts b/packages/typescript/ai-solid/src/use-chat.ts
index c634e04d..bc557862 100644
--- a/packages/typescript/ai-solid/src/use-chat.ts
+++ b/packages/typescript/ai-solid/src/use-chat.ts
@@ -19,9 +19,9 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
  const [error, setError] = createSignal<Error | undefined>(undefined)
// Create ChatClient instance with callbacks to sync state
- // Note: Connection changes will recreate the client and reset state.
- // Body and other options are captured at client creation time.
- // To update connection/body, remount the component or use a key prop.
+ // Note: Options are captured at client creation time.
+ // The connection adapter can use functions for dynamic values (url, headers, etc.)
+ // which are evaluated lazily on each request.
const client = createMemo(() => {
return new ChatClient({
connection: options.connection,
@@ -44,10 +44,10 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
setError(newError)
},
})
- // Only recreate when connection changes (most critical option)
- // Other options are captured at creation time
+ // Only recreate when clientId changes
+ // Connection and other options are captured at creation time
// eslint-disable-next-line react-hooks/exhaustive-deps
- }, [clientId, options.connection])
+ }, [clientId])
// Sync initial messages on mount only
// Note: initialMessages are passed to ChatClient constructor, but we also