diff --git a/docs/package-lock.json b/docs/package-lock.json
index 7df038b..061ea86 100644
--- a/docs/package-lock.json
+++ b/docs/package-lock.json
@@ -19,11 +19,12 @@
"dependencies": {
"@types/react": "^17.0.0",
"@types/react-dom": "^17.0.0",
- "@vectara/stream-query-client": "^3.2.0",
+ "@vectara/stream-query-client": "^5.1.0",
"classnames": "^2.3.2",
"lodash": "^4.17.21",
"prismjs": "^1.29.0",
"react-focus-on": "^3.9.1",
+ "react-markdown": "^8.0.6",
"uuid-by-string": "^4.0.0"
},
"devDependencies": {
@@ -84,7 +85,7 @@
"@types/react-dom": "^17.0.0",
"@typescript-eslint/eslint-plugin": "^5.50.0",
"@typescript-eslint/parser": "^5.50.0",
- "@vectara/stream-query-client": "^3.2.0",
+ "@vectara/stream-query-client": "^5.1.0",
"chokidar": "^3.5.3",
"classnames": "^2.3.2",
"cross-fetch": "^4.0.0",
@@ -107,6 +108,7 @@
"react-focus-on": "^3.9.1",
"react-icons": "^5.0.1",
"react-jsx-parser": "^1.29.0",
+ "react-markdown": "^8.0.6",
"react-router-dom": "^6.8.2",
"rimraf": "^5.0.5",
"shadow-dom-testing-library": "^1.11.2",
diff --git a/docs/src/index.tsx b/docs/src/index.tsx
index 353f21b..007dca4 100644
--- a/docs/src/index.tsx
+++ b/docs/src/index.tsx
@@ -28,7 +28,7 @@ import { HeaderLogo } from "./components/HeaderLogo";
import { ConfigurationDrawer } from "components/ConfigurationDrawer";
import "./ui/_index.scss";
import "./index.scss";
-import { RerankerId } from "../../src/types";
+import { AgenticResponse, RerankerId } from "../../src/types";
const formatStringProp = (value?: string) => {
if (!value) {
@@ -235,6 +235,38 @@ const App = () => {
rerankerId={rerankerId}
lambda={lambda}
enableStreaming={isStreamingEnabled}
+ agenticConfiguration={{
+ url: "https://vectara-com-chatbot-agent-server.onrender.com/verify-prospect",
+ onAgenticResponse: (response: AgenticResponse) => {
+ if (response.event === "prompt_schedule_sales") {
+ return {
+ message: response.message,
+ userActionOptions: [
+ {
+ label: "Schedule a demo",
+ onSelect: () =>
+ console.log("In a live context, this would connect you to the Vectara Sales team.")
+ }
+ ]
+ };
+ }
+
+ if (response.event === "handle_prospect_decline") {
+ return {
+ message: response.message
+ };
+ }
+
+ if (response.event === "schedule_sales") {
+ return {
+ message: {
+ content: "In a live context, this would connect you to the Vectara Sales team."
+ }
+ };
+ }
+ }
+ }}
+ requestSource="react-chatbot-docs"
/>
diff --git a/package-lock.json b/package-lock.json
index df6c2cc..47be292 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -11,7 +11,7 @@
"dependencies": {
"@types/react": "^17.0.0",
"@types/react-dom": "^17.0.0",
- "@vectara/stream-query-client": "^3.2.0",
+ "@vectara/stream-query-client": "^5.1.0",
"classnames": "^2.3.2",
"lodash": "^4.17.21",
"prismjs": "^1.29.0",
@@ -2526,9 +2526,9 @@
"dev": true
},
"node_modules/@vectara/stream-query-client": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/@vectara/stream-query-client/-/stream-query-client-3.2.0.tgz",
- "integrity": "sha512-OPUik3bIVOKPc3ytvpFTyGbrEHDicbn9nFWiG5ztgAovaDlRL4bu6d0sXOAQLFYgylVMSML+xV4iIsql25P9yg=="
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/@vectara/stream-query-client/-/stream-query-client-5.1.0.tgz",
+ "integrity": "sha512-7vYg1fHsCQFuVRiP8eyqJ9LUzF3J60F6P0XpyW5N0TY6x1kmMfGb4vWHBUDIFR92MTPlMOBLNb6oQPfW4QiOCQ=="
},
"node_modules/abab": {
"version": "2.0.6",
diff --git a/package.json b/package.json
index 61326e5..c9efa9e 100644
--- a/package.json
+++ b/package.json
@@ -32,7 +32,7 @@
"dependencies": {
"@types/react": "^17.0.0",
"@types/react-dom": "^17.0.0",
- "@vectara/stream-query-client": "^3.2.0",
+ "@vectara/stream-query-client": "^5.1.0",
"classnames": "^2.3.2",
"lodash": "^4.17.21",
"prismjs": "^1.29.0",
diff --git a/src/components/ChatItem.tsx b/src/components/ChatItem.tsx
index bdccc6e..abc6337 100644
--- a/src/components/ChatItem.tsx
+++ b/src/components/ChatItem.tsx
@@ -114,14 +114,12 @@ export const ChatItem = ({ question, answer, searchResults, factualConsistencySc
)}
-
{factualConsistencyScore && (
<>
{factualConsistencyScore}
>
)}
-
{reorderedSearchResults && reorderedSearchResults.length > 0 && (
<>
@@ -140,9 +138,11 @@ export const ChatItem = ({ question, answer, searchResults, factualConsistencySc
return (
<>
-
+ {question && (
+
+ )}
diff --git a/src/components/ChatReferences.tsx b/src/components/ChatReferences.tsx
index 4d40944..8f9b866 100644
--- a/src/components/ChatReferences.tsx
+++ b/src/components/ChatReferences.tsx
@@ -1,6 +1,6 @@
-import {VuiFlexContainer, VuiFlexItem, VuiText, VuiAccordion, VuiSpacer} from "../vui";
+import { VuiFlexContainer, VuiFlexItem, VuiText, VuiAccordion, VuiSpacer } from "../vui";
import { SearchResultWithSnippet } from "../types";
-import {parseSnippet} from "../utils/parseSnippet";
+import { parseSnippet } from "../utils/parseSnippet";
type Props = {
searchResults: SearchResultWithSnippet[];
@@ -32,7 +32,7 @@ export const ChatReferences = ({ searchResults, isOpen = false, setIsOpen = () =
const ChatReference = ({ result, position }: { result: SearchResultWithSnippet; position: number }) => {
const url = result.document_metadata.url as string;
- const { text } = parseSnippet(result?.snippet?.text)
+ const { text } = parseSnippet(result?.snippet?.text);
return (
<>
@@ -48,7 +48,7 @@ const ChatReference = ({ result, position }: { result: SearchResultWithSnippet;
{text}
) : (
- text
+ text
)}
diff --git a/src/components/ChatView.tsx b/src/components/ChatView.tsx
index 447312f..6c12e67 100644
--- a/src/components/ChatView.tsx
+++ b/src/components/ChatView.tsx
@@ -1,5 +1,12 @@
import { Fragment, ReactNode, useEffect, useMemo, useRef, useState } from "react";
-import { VuiButtonSecondary, VuiContextProvider, VuiFlexContainer, VuiFlexItem, VuiSpacer } from "../vui";
+import {
+ VuiButtonSecondary,
+ VuiContextProvider,
+ VuiFlexContainer,
+ VuiFlexItem,
+ VuiSpacer,
+ VuiTopicButton
+} from "../vui";
import { QueryInput } from "./QueryInput";
import { ChatItem } from "./ChatItem";
import { useChat } from "../useChat";
@@ -7,7 +14,7 @@ import { Loader } from "./Loader";
import { MinimizeIcon } from "./Icons";
import { FactualConsistencyBadge } from "./FactualConsistencyBadge";
import { ExampleQuestions } from "./exampleQuestions/ExampleQuestions";
-import {RerankerId, SummaryLanguage} from "types";
+import { AgenticConfiguration, ChatActionOption, RerankerId, SummaryLanguage } from "types";
const inputSizeToQueryInputSize = {
large: "l",
@@ -66,6 +73,13 @@ export interface Props {
// Enables streaming responses from the API. Defaults to true.
enableStreaming?: boolean;
+
+ // Enables the chatbot to modify its behavior by sending requests to an agentic service.
+ agenticConfiguration?: AgenticConfiguration;
+
+ // A string that allows the Vectara platform to track where chat requests are coming from.
+ // This could be an application name, for example.
+ requestSource?: string;
}
/**
@@ -91,6 +105,8 @@ export const ChatView = ({
rerankerId,
lambda,
enableStreaming = true,
+ agenticConfiguration,
+ requestSource
}: Props) => {
const [isOpen, setIsOpen] = useState(isInitiallyOpen ?? false);
const [query, setQuery] = useState("");
@@ -105,7 +121,9 @@ export const ChatView = ({
summaryPromptName,
rerankerId,
lambda,
- enableStreaming
+ enableStreaming,
+ agenticConfiguration,
+ requestSource
});
const appLayoutRef = useRef(null);
@@ -150,27 +168,53 @@ export const ChatView = ({
const historyItems = useMemo(
() =>
- messageHistory.map((turn, index) => {
- const { question, answer, results, factualConsistencyScore } = turn;
- const onRetry =
- hasError && index === messageHistory.length - 1
- ? () => sendMessage({ query: question, isRetry: true })
- : undefined;
-
- return (
-
-
- }
- onRetry={onRetry}
- />
- {index < messageHistory.length - 1 && }
-
- );
+ messageHistory.map((messageHistoryItem, index) => {
+ if (messageHistoryItem.type === "action") {
+ const { options } = messageHistoryItem;
+
+ return (
+
+
+
+ {options?.map((option: ChatActionOption, optionIndex: number) => (
+ {
+ if (option.message) {
+ sendMessage({ query: option.message });
+ }
+ option.onSelect?.();
+ }}
+ />
+ ))}
+
+
+
+ );
+ } else {
+ const { question, answer, results, factualConsistencyScore } = messageHistoryItem;
+ const onRetry =
+ hasError && index === messageHistory.length - 1
+ ? () => sendMessage({ query: question, isRetry: true })
+ : undefined;
+
+ return (
+
+
+ }
+ onRetry={onRetry}
+ />
+ {index < messageHistory.length - 1 && }
+
+ );
+ }
}),
[messageHistory]
);
@@ -180,8 +224,10 @@ export const ChatView = ({
const onSendQuery = (queryOverride?: string) => {
if (isRequestDisabled && !queryOverride) return;
- sendMessage({ query: queryOverride ?? query });
+
setQuery("");
+
+ sendMessage({ query: queryOverride ?? query });
};
const spacer = historyItems.length === 0 ? null : ;
diff --git a/src/components/chatView.scss b/src/components/chatView.scss
index 6644cc7..6f9b38f 100644
--- a/src/components/chatView.scss
+++ b/src/components/chatView.scss
@@ -49,6 +49,18 @@ $chatbotPosition: $sizeS;
}
}
+.vrcbChatMessageContainer--actionResponse {
+ justify-content: flex-start;
+ padding: $sizeM $sizeXxl $sizeM $sizeS;
+
+ .vrcbChatMessage {
+ color: $colorText;
+ font-weight: $fontWeightBold;
+ font-size: $fontSizeStandard;
+ padding-left: 0;
+ }
+}
+
.vrcbChatMessageContainer--thinking,
.vrcbChatMessageContainer--answer {
padding: 0 $sizeXxl;
diff --git a/src/index.tsx b/src/index.tsx
index 2622806..533bd8c 100644
--- a/src/index.tsx
+++ b/src/index.tsx
@@ -1,7 +1,7 @@
import { ReactNode, useEffect, useRef } from "react";
import * as ReactDOM from "react-dom";
import { Props, ChatView } from "./components/ChatView";
-import type {RerankerId, SummaryLanguage} from "./types";
+import type { AgenticConfiguration, RerankerId, SummaryLanguage } from "./types";
export type { Props } from "components/ChatView";
export { DEFAULT_SUMMARIZER, DEFAULT_RERANKER_ID, DEFAULT_LAMBDA_VALUE } from "./useChat";
@@ -15,6 +15,7 @@ class ReactChatbotWebComponent extends HTMLElement {
// References
emptyStateDisplay!: ReactNode;
+ agenticConfiguration!: AgenticConfiguration;
static get observedAttributes() {
return [
@@ -35,6 +36,8 @@ class ReactChatbotWebComponent extends HTMLElement {
"rerankerId",
"lambda",
"enablestreaming",
+ "agenticconfigurationupdatetime",
+ "requestsource"
];
}
@@ -67,9 +70,15 @@ class ReactChatbotWebComponent extends HTMLElement {
this.setAttribute("emptystatedisplayupdatetime", Date.now().toString());
}
+ public setAgenticConfiguration(agenticConfiguration: AgenticConfiguration) {
+ this.agenticConfiguration = agenticConfiguration;
+
+ this.setAttribute("agenticconfigurationupdatetime", Date.now().toString());
+ }
+
public connectedCallback() {
const customerId = this.getAttribute("customerId") ?? "";
- const corpusKeys = (this.getAttribute("corpuskeys") ?? "");
+ const corpusKeys = this.getAttribute("corpuskeys") ?? "";
const apiKey = this.getAttribute("apiKey") ?? "";
const title = this.getAttribute("title") ?? undefined;
const placeholder = this.getAttribute("placeholder") ?? undefined;
@@ -83,12 +92,14 @@ class ReactChatbotWebComponent extends HTMLElement {
const language = (this.getAttribute("language") as SummaryLanguage) ?? undefined;
const enableFactualConsistencyScore = this.getAttribute("enableFactualConsistencyScore") === "true";
const summaryPromptName = this.getAttribute("summaryPromptName") ?? undefined;
- const rerankerId = this.getAttribute("rerankerId") !== null ? parseInt(this.getAttribute("rerankerId")!, 10) : undefined;
+ const rerankerId =
+ this.getAttribute("rerankerId") !== null ? parseInt(this.getAttribute("rerankerId")!, 10) : undefined;
const lambda = this.getAttribute("lambda") !== null ? parseFloat(this.getAttribute("lambda")!) : undefined;
const enableStreaming =
- this.getAttribute("enableStreaming") !== null ? this.getAttribute("enableStreaming") == "true" : undefined;
+ this.getAttribute("enableStreaming") !== null ? this.getAttribute("enableStreaming") == "true" : undefined;
+ const agenticConfiguration = this.agenticConfiguration ?? undefined;
+ const requestSource = this.getAttribute("requestsource") ?? undefined;
- console.log(corpusKeys)
ReactDOM.render(
,
this.mountPoint
@@ -134,6 +147,11 @@ export const ReactChatbot = (props: Props) => {
// @ts-ignore
(ref.current as ReactChatbotWebComponent).setEmptyStateDisplay(props.emptyStateDisplay);
}
+
+ if (props.agenticConfiguration) {
+ // @ts-ignore
+ (ref.current as ReactChatbotWebComponent).setAgenticConfiguration(props.agenticConfiguration);
+ }
}, [props]);
const typedProps = props as Record;
diff --git a/src/types.ts b/src/types.ts
index 1b1ea0e..106a70d 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -59,12 +59,33 @@ export type SearchError = {
export type ChatTurn = {
id: string;
+ type: "turn";
question: string;
answer: string;
results: SearchResultWithSnippet[];
factualConsistencyScore?: number;
};
+export type ChatAction = {
+ id: string;
+ type: "action";
+ options: Array;
+};
+
+export type ChatActionOption = {
+ label: string;
+
+ // An optional message to send to the chatbot when the user selects this action.
+ message?: string;
+
+ // An optional link to send the user to when they select this action.
+ url?: string;
+
+ onSelect?: () => void;
+};
+
+export type MessageHistoryItem = ChatTurn | ChatAction;
+
export type NoneReranker = { type: "none" };
export type CustomerSpecificReranker = {
@@ -162,10 +183,56 @@ export type ChatQueryResponse = {
factual_consistency_score: number;
response_language: string;
rendered_prompt: string;
- rephrased_query: string
-}
+ rephrased_query: string;
+};
+
+export type AgenticMessageHistoryItem = {
+ role: "user" | "chatbot";
+ message: string;
+};
+
+export type AgenticResponse = {
+ // Any string that corresponds to an event.
+ // Useful for specifying behaviors to be executed in the agentic configuration callback.
+ event: string;
+
+ // An optional message from the service
+ message?: AgenticMessage;
+};
+
+export type AgenticMessage = {
+ content?: string;
+ post?: string;
+};
+
+/**
+ * Configuration for connecting to an agentic service.
+ * Use the `onAgenticResponse` callback to execute any side effects in the parent component.
+ * Notes:
+ * - The chat history, including the user's most recent message, is sent to the service.
+ * - The service response must conform to the AgenticResponse type.
+ */
+export type AgenticConfiguration = {
+ // The URL of the web service that responds in the form of the AgenticResponse type.
+ url: string;
+
+ // A callback for handling the web service response.
+ // Can return an AgenticResponseActionConfiguration to display a message and optional actions for the user to take.
+ onAgenticResponse: (response: AgenticResponse) => AgenticResponseActionConfiguration | undefined;
+};
+
+/**
+ * A configuration that can be returned by the agentic response handler.
+ */
+export type AgenticResponseActionConfiguration = {
+ // The message that the chatbot should show the user.
+ message: AgenticMessage;
+
+ // An optional array of actions that the user can take to respond.
+ userActionOptions?: Array;
+};
-export type RerankerId = 272725719 | 272725718
+export type RerankerId = 272725719 | 272725718;
export const mmrRerankerId = 272725718;
export const DEFAULT_DOMAIN = "https://api.vectara.io";
diff --git a/src/useChat.test.ts b/src/useChat.test.ts
index ea3a69f..00df359 100644
--- a/src/useChat.test.ts
+++ b/src/useChat.test.ts
@@ -3,6 +3,7 @@ import { act, renderHook } from "@testing-library/react-hooks";
import { waitFor } from "@testing-library/react";
import * as sendSearchRequestInterface from "./utils/sendSearchRequest";
import * as streamQueryInterface from "@vectara/stream-query-client";
+import { AgenticResponse, ChatTurn } from "./types";
jest.mock("@vectara/stream-query-client", () => {
return {
@@ -26,6 +27,14 @@ const MOCK_API_RESPONSE = {
]
};
+const AGENTIC_RESPONSE_MESSAGE_WITH_CONTENT_OVERRIDE = {
+ content: "This is the message the chatbot will send when receiving a known agentic service event."
+};
+
+const AGENTIC_RESPONSE_MESSAGE_TO_APPEND_TO_QUERY_REPONSE = {
+ post: "This is supplementary text to append to the original message generated by the query API."
+};
+
describe("useChat", () => {
let sendSearchRequestSpy: jest.SpyInstance;
let streamQuerySpy: jest.SpyInstance;
@@ -42,13 +51,13 @@ describe("useChat", () => {
describe("streaming", () => {
it("should send messages and update hook values", async () => {
const { result } = renderHook(() =>
- useChat({ customerId: "mock-customer-id", corpusKeys: "1", apiKey: "mock-api-key" })
+ useChat({ customerId: "mock-customer-id", corpusKeys: "1", apiKey: "mock-api-key" })
);
- streamQuerySpy.mockImplementation(async ({onStreamEvent}) => {
+ streamQuerySpy.mockImplementation(async ({ onStreamEvent }) => {
await onStreamEvent({
type: "generationChunk",
- updatedText: "mock-updated-text",
+ updatedText: "mock-updated-text"
});
});
@@ -57,6 +66,7 @@ describe("useChat", () => {
});
expect(result.current.activeMessage).toEqual({
+ type: "turn",
answer: "mock-updated-text",
id: "placeholder-message-id",
question: "mock-query",
@@ -66,7 +76,7 @@ describe("useChat", () => {
expect(result.current.isStreamingResponse).toEqual(true);
expect(result.current.messageHistory).toEqual([]);
- streamQuerySpy.mockImplementation(async ({onStreamEvent}) => {
+ streamQuerySpy.mockImplementation(async ({ onStreamEvent }) => {
await onStreamEvent({
type: "end"
});
@@ -85,7 +95,7 @@ describe("useChat", () => {
describe("non-streaming", () => {
it("should send messages and update message history", async () => {
const { result } = renderHook(() =>
- useChat({ customerId: "mock-customer-id", corpusKeys: "1", apiKey: "mock-api-key", enableStreaming: false })
+ useChat({ customerId: "mock-customer-id", corpusKeys: "1", apiKey: "mock-api-key", enableStreaming: false })
);
sendSearchRequestSpy.mockImplementation(() => Promise.resolve(MOCK_API_RESPONSE));
@@ -95,9 +105,9 @@ describe("useChat", () => {
});
expect(sendSearchRequestSpy).toHaveBeenCalledWith(
- expect.objectContaining({
- query: "mock-query"
- })
+ expect.objectContaining({
+ query: "mock-query"
+ })
);
expect(result.current.messageHistory.length).toEqual(1);
@@ -105,7 +115,7 @@ describe("useChat", () => {
it("should reflect error state", async () => {
const { result } = renderHook(() =>
- useChat({ customerId: "mock-customer-id", corpusKeys: "1", apiKey: "mock-api-key", enableStreaming: false })
+ useChat({ customerId: "mock-customer-id", corpusKeys: "1", apiKey: "mock-api-key", enableStreaming: false })
);
sendSearchRequestSpy.mockImplementation(() => {
throw "error";
@@ -120,7 +130,7 @@ describe("useChat", () => {
it("should reflect loading state", async () => {
const { result } = renderHook(() =>
- useChat({ customerId: "mock-customer-id", corpusKeys: "1", apiKey: "mock-api-key" })
+ useChat({ customerId: "mock-customer-id", corpusKeys: "1", apiKey: "mock-api-key" })
);
sendSearchRequestSpy.mockImplementation(() => {
return new Promise(() => {});
@@ -134,9 +144,176 @@ describe("useChat", () => {
});
});
+ describe("agentic behavior", () => {
+ (globalThis as any).fetch ||= jest.fn();
+ const global = globalThis as typeof globalThis & { fetch: any };
+
+ it("should be able to respond with pre-configured messages from agentic service responses", async () => {
+ const mockFetch = jest.spyOn(global, "fetch").mockResolvedValue({
+ status: 200,
+ json: jest.fn().mockResolvedValue({ event: "mock-event" })
+ });
+
+ const { result } = renderHook(() =>
+ useChat({
+ customerId: "mock-customer-id",
+ corpusKeys: "1",
+ apiKey: "mock-api-key",
+ enableStreaming: true,
+ agenticConfiguration: {
+ url: "path/to/agentic-service",
+ onAgenticResponse: (response: AgenticResponse) => {
+ if (response.event === "mock-event") {
+ return {
+ message: AGENTIC_RESPONSE_MESSAGE_WITH_CONTENT_OVERRIDE
+ };
+ }
+ }
+ }
+ })
+ );
+
+ await act(async () => {
+ await result.current.sendMessage({ query: "Can I speak with sales?" });
+ });
+
+ await act(async () => {
+ // There should only be one item in message history, containing:
+ // - the message from the user
+ // - the answer to the user's message, provided via onAgenticResponse return value
+ expect(result.current.messageHistory.length).toEqual(1);
+ expect((result.current.messageHistory[0] as ChatTurn).answer).toEqual(
+ AGENTIC_RESPONSE_MESSAGE_WITH_CONTENT_OVERRIDE.content
+ );
+ });
+
+ mockFetch.mockRestore();
+ });
+
+ it("should be able to append messages from agentic service responses to the generated response from the query API", async () => {
+ const mockFetch = jest.spyOn(global, "fetch").mockResolvedValue({
+ status: 200,
+ json: jest.fn().mockResolvedValue({ event: "mock-event" })
+ });
+
+ const { result } = renderHook(() =>
+ useChat({
+ customerId: "mock-customer-id",
+ corpusKeys: "1",
+ apiKey: "mock-api-key",
+ enableStreaming: true,
+ agenticConfiguration: {
+ url: "path/to/agentic-service",
+ onAgenticResponse: (response: AgenticResponse) => {
+ if (response.event === "mock-event") {
+ return {
+ message: AGENTIC_RESPONSE_MESSAGE_TO_APPEND_TO_QUERY_REPONSE
+ };
+ }
+ }
+ }
+ })
+ );
+
+ streamQuerySpy.mockImplementation(async ({ onStreamEvent }) => {
+ await onStreamEvent({
+ type: "generationChunk",
+ updatedText: "mock-updated-text"
+ });
+ await onStreamEvent({
+ type: "end"
+ });
+ });
+
+ await act(async () => {
+ await result.current.sendMessage({ query: "mock-query" });
+ });
+
+ await act(async () => {
+ // There should only be one item in message history, containing:
+ // - the message from the user
+ // - the answer to the user's message, provided via onAgenticResponse return value
+ expect(result.current.messageHistory.length).toEqual(1);
+ expect((result.current.messageHistory[0] as ChatTurn).answer).toEqual(
+ `mock-updated-text
${AGENTIC_RESPONSE_MESSAGE_TO_APPEND_TO_QUERY_REPONSE.post}`
+ );
+ });
+
+ mockFetch.mockRestore();
+ });
+
+ it("should be able to show available user actions based on agentic service responses", async () => {
+ const mockFetch = jest.spyOn(global, "fetch").mockResolvedValue({
+ status: 200,
+ json: jest.fn().mockResolvedValue({ event: "mock-event" })
+ });
+
+ const userActionOptions = [
+ {
+ label: "Action 1",
+ message: "Message to chatbot after selecting Action 1"
+ },
+ {
+ label: "Action 2",
+ message: "Message to chatbot after selecting Action 2"
+ }
+ ];
+
+ const { result } = renderHook(() =>
+ useChat({
+ customerId: "mock-customer-id",
+ corpusKeys: "1",
+ apiKey: "mock-api-key",
+ enableStreaming: false,
+ agenticConfiguration: {
+ url: "path/to/agentic-service",
+ onAgenticResponse: (response: AgenticResponse) => {
+ if (response.event === "mock-event") {
+ return {
+ message: AGENTIC_RESPONSE_MESSAGE_WITH_CONTENT_OVERRIDE,
+ userActionOptions
+ };
+ }
+ }
+ }
+ })
+ );
+
+ await act(async () => {
+ await result.current.sendMessage({ query: "Can I speak with sales?" });
+ });
+
+ await act(async () => {
+ jest.advanceTimersByTime(1000);
+
+ // There should only be two items in message history
+ // - item 1 (a "turn" object), containing:
+ // - the message from the user
+ // - the answer to the user's message, provided via onAgenticResponse return value
+ // - item 2 (an "action" object), containing:
+ // - an array of configurations representing actions a user can take to respond
+ expect(result.current.messageHistory.length).toEqual(2);
+ expect(result.current.messageHistory[0]).toEqual(
+ expect.objectContaining({
+ type: "turn",
+ answer: AGENTIC_RESPONSE_MESSAGE_WITH_CONTENT_OVERRIDE.content
+ })
+ );
+ expect(result.current.messageHistory[1]).toEqual(
+ expect.objectContaining({
+ type: "action",
+ options: userActionOptions
+ })
+ );
+ });
+
+ mockFetch.mockRestore();
+ });
+ });
+
it("should be able to reset the conversation", async () => {
const { result } = renderHook(() =>
- useChat({ customerId: "mock-customer-id", corpusKeys: "1", apiKey: "mock-api-key", enableStreaming: false })
+ useChat({ customerId: "mock-customer-id", corpusKeys: "1", apiKey: "mock-api-key", enableStreaming: false })
);
sendSearchRequestSpy.mockImplementation(() => Promise.resolve(MOCK_API_RESPONSE));
@@ -147,18 +324,18 @@ describe("useChat", () => {
// Assert that the second request uses the current conversation id.
expect(sendSearchRequestSpy).toHaveBeenCalledWith(
- expect.objectContaining({
- chat: { store: true, conversationId: "mock-conversation-id" }
- })
+ expect.objectContaining({
+ chat: { store: true, conversationId: "mock-conversation-id" }
+ })
);
sendSearchRequestSpy.mockImplementation(() =>
- Promise.resolve({
- ...MOCK_API_RESPONSE,
- chat_id: "mock-conversation-id-2",
- turn_id: "mock-turn-id",
- answer: "mock-answer"
- })
+ Promise.resolve({
+ ...MOCK_API_RESPONSE,
+ chat_id: "mock-conversation-id-2",
+ turn_id: "mock-turn-id",
+ answer: "mock-answer"
+ })
);
await act(async () => {
@@ -177,4 +354,4 @@ describe("useChat", () => {
// Assert that the request after reset is has no conversation id.
expect(recentSendSearchRequestCall.chat.conversationId).toEqual(undefined);
});
-});
\ No newline at end of file
+});
diff --git a/src/useChat.ts b/src/useChat.ts
index 28a2bad..4d13a1a 100644
--- a/src/useChat.ts
+++ b/src/useChat.ts
@@ -1,15 +1,22 @@
-import {useEffect, useRef, useState} from "react";
+import { useEffect, useRef, useState } from "react";
import {
+ AgenticMessageHistoryItem,
+ AgenticResponse,
+ AgenticResponseActionConfiguration,
+ ChatActionOption,
ChatQueryResponse,
- ChatTurn, END_TAG,
+ ChatTurn,
+ END_TAG,
+ MessageHistoryItem,
mmrRerankerId,
SearchResult,
- SearchResultWithSnippet, START_TAG,
+ SearchResultWithSnippet,
+ START_TAG,
SummaryLanguage
} from "types";
-import {ApiV2, streamQueryV2} from "@vectara/stream-query-client";
-import {parseSnippet} from "./utils/parseSnippet";
-import {sendSearchRequest} from "./utils/sendSearchRequest";
+import { ApiV2, streamQueryV2 } from "@vectara/stream-query-client";
+import { parseSnippet } from "./utils/parseSnippet";
+import { sendSearchRequest } from "./utils/sendSearchRequest";
/**
* A hook that exposes:
@@ -23,9 +30,9 @@ import {sendSearchRequest} from "./utils/sendSearchRequest";
*/
export const DEFAULT_SUMMARIZER = "vectara-summary-ext-v1.2.0";
-export const DEFAULT_RERANKER_ID = 272725718
+export const DEFAULT_RERANKER_ID = 272725718;
-export const DEFAULT_LAMBDA_VALUE = 0.005
+export const DEFAULT_LAMBDA_VALUE = 0.005;
type UseChatConfig = {
customerId: string;
@@ -38,6 +45,11 @@ type UseChatConfig = {
rerankerId?: number;
lambda?: number;
enableStreaming?: boolean;
+ agenticConfiguration?: {
+ url: string;
+ onAgenticResponse: (response: AgenticResponse) => AgenticResponseActionConfiguration | undefined;
+ };
+ requestSource?: string;
};
export const useChat = ({
@@ -51,16 +63,23 @@ export const useChat = ({
rerankerId = DEFAULT_RERANKER_ID,
lambda = DEFAULT_LAMBDA_VALUE,
enableStreaming = true,
+ agenticConfiguration,
+ requestSource
}: UseChatConfig) => {
- const [messageHistory, setMessageHistory] = useState([]);
+ const [messageHistory, setMessageHistory] = useState>([]);
const recentQuestion = useRef("");
const [activeMessage, setActiveMessage] = useState(null);
const [isLoading, setIsLoading] = useState(false);
const [isStreamingResponse, setIsStreamingResponse] = useState(false);
const [conversationId, setConversationId] = useState(null);
const [hasError, setHasError] = useState(false);
+ const [userActionOptions, setUserActionOptions] = useState | null>(null);
const sendMessage = async ({ query, isRetry = false }: { query: string; isRetry?: boolean }) => {
+ let agenticResponseMessageContent: string | undefined;
+ let agenticResponseMessagePostContent: string | undefined;
+ let agenticResponseActionConfiguration: AgenticResponseActionConfiguration | undefined;
+
if (isLoading) return;
if (isRetry) {
@@ -70,197 +89,309 @@ export const useChat = ({
setActiveMessage(null);
recentQuestion.current = query;
+ setIsLoading(true);
+
// Optimistically add a placeholder entry as the active message.
// We'll replace this later on with data from the server.
setActiveMessage({
id: "placeholder-message-id",
+ type: "turn",
question: query,
answer: "",
results: [],
factualConsistencyScore: undefined
});
- setIsLoading(true);
+
+ // Query the agentic url if an agentic configuration was defined.
+ if (agenticConfiguration) {
+ // Extract all chat turns from message history.
+ // These message history items, which contain messages to and from the chatbot,
+ // will be sent to the agentic service for evaluation.
+ const chatTurns: Array = messageHistory.reduce(
+ (acc: Array, messageHistoryItem: MessageHistoryItem) => {
+ if (messageHistoryItem.type === "turn") {
+ acc.push(messageHistoryItem as ChatTurn);
+ }
+ return acc;
+ },
+ []
+ );
+
+ // Format the chat turn messages for the agentic service.
+ const agenticMessageHistory: Array = chatTurns.reduce(
+ (acc: Array, chatTurn: ChatTurn) => {
+ acc.push({
+ role: "user",
+ message: chatTurn.question
+ });
+
+ if (chatTurn.answer) {
+ acc.push({
+ role: "chatbot",
+ message: chatTurn.answer
+ });
+ }
+ return acc;
+ },
+ []
+ );
+
+ // For the last item, push the user's most recent message.
+ // This is not part of message history yet, as it's currently set as the active message.
+ agenticMessageHistory.push({
+ role: "user",
+ message: recentQuestion.current
+ });
+
+ const agenticResponse = await fetch(agenticConfiguration.url, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json"
+ },
+ body: JSON.stringify({ messages: agenticMessageHistory })
+ });
+
+ const responseData = await agenticResponse.json();
+ agenticResponseActionConfiguration = agenticConfiguration.onAgenticResponse(responseData);
+
+ ({ content: agenticResponseMessageContent, post: agenticResponseMessagePostContent } =
+ agenticResponseActionConfiguration?.message ?? {});
+ }
+
let resultsWithSnippets: SearchResultWithSnippet[];
+
if (enableStreaming) {
- try {
+ setIsStreamingResponse(true);
- const onStreamEvent = (event: ApiV2.StreamEvent) => {
- switch (event.type) {
- case "requestError":
- case "genericError":
- case "error":
- setHasError(true);
- setIsLoading(false);
- break;
-
- case "chatInfo":
- setConversationId(event.chatId);
- setActiveMessage((prevState) => ({
- id: event.chatId,
- question: recentQuestion.current,
- answer: prevState?.answer ?? "",
- results: prevState?.results ?? [],
- }));
-
- break;
-
- case "searchResults":
- resultsWithSnippets = event.searchResults.map((result: SearchResult) => {
- const { pre, text, post } = parseSnippet(result.text);
-
- return {
- ...result,
- snippet: {
- pre,
- text,
- post
- }
- };
- });
-
- setActiveMessage((prevState) => ({
- id: prevState?.id ?? "",
- question: recentQuestion.current,
- answer: prevState?.answer ?? "",
- results: resultsWithSnippets
- }));
- break;
-
- case "generationChunk":
- setIsStreamingResponse(true);
- setIsLoading(false);
- setActiveMessage((prevState) => ({
- id: prevState?.id ?? "",
- question: recentQuestion.current,
- answer: event.updatedText ?? "",
- results: prevState?.results ?? [],
- }));
- break;
-
- case "factualConsistencyScore":
- setActiveMessage((prevState) => ({
- id: prevState?.id ?? "",
- question: recentQuestion.current,
- answer: prevState?.answer ?? "",
- results: prevState?.results ?? [],
- factualConsistencyScore: event.factualConsistencyScore
- }));
- break;
-
- case "end":
- setIsStreamingResponse(false);
- break;
- }
- };
-
- const streamQueryConfig: ApiV2.StreamQueryConfig = {
- apiKey: apiKey!,
- customerId: customerId!,
- query: query,
- corpusKey: corpusKeys!,
- search: {
- offset: 0,
- metadataFilter: "",
- lexicalInterpolation: lambda,
- reranker: rerankerId === mmrRerankerId
- ? {
- type: "mmr",
- diversityBias: 0
+ // If the agentic response overrides the query API, simply respond with the overriding content.
+ // Otherwise, hit the chat API. Note that if specified by the agentic service, we may need to append some custom content
+ // to the end of the chat API's generated message.
+ if (agenticResponseMessageContent) {
+ setIsLoading(false);
+ await streamAgenticAnswer(agenticResponseMessageContent);
+
+ if (agenticResponseMessagePostContent) {
+ await streamAgenticAnswer("", true);
+ await streamAgenticAnswer(agenticResponseMessagePostContent);
+ }
+
+ setIsStreamingResponse(false);
+ setUserActionOptions(agenticResponseActionConfiguration?.userActionOptions ?? null);
+ } else {
+ try {
+ const onStreamEvent = async (event: ApiV2.StreamEvent) => {
+ switch (event.type) {
+ case "requestError":
+ case "genericError":
+ case "error":
+ setHasError(true);
+ setIsLoading(false);
+ break;
+
+ case "chatInfo":
+ setConversationId(event.chatId);
+ setActiveMessage((prevState) => ({
+ id: event.chatId,
+ type: "turn",
+ question: recentQuestion.current,
+ answer: prevState?.answer ?? "",
+ results: prevState?.results ?? []
+ }));
+
+ break;
+
+ case "searchResults":
+ resultsWithSnippets = event.searchResults.map((result: SearchResult) => {
+ agenticResponseActionConfiguration?.userActionOptions;
+ const { pre, text, post } = parseSnippet(result.text);
+
+ return {
+ ...result,
+ snippet: {
+ pre,
+ text,
+ post
+ }
+ };
+ });
+
+ setActiveMessage((prevState) => ({
+ id: prevState?.id ?? "",
+ type: "turn",
+ question: recentQuestion.current,
+ answer: prevState?.answer ?? "",
+ results: resultsWithSnippets
+ }));
+ break;
+
+ case "generationChunk":
+ setIsStreamingResponse(true);
+ setIsLoading(false);
+ setActiveMessage((prevState) => ({
+ id: prevState?.id ?? "",
+ type: "turn",
+ question: recentQuestion.current,
+ answer: event.updatedText ?? "",
+ results: prevState?.results ?? []
+ }));
+ break;
+
+ case "factualConsistencyScore":
+ setActiveMessage((prevState) => ({
+ id: prevState?.id ?? "",
+ type: "turn",
+ question: recentQuestion.current,
+ answer: prevState?.answer ?? "",
+ results: prevState?.results ?? [],
+ factualConsistencyScore: event.factualConsistencyScore
+ }));
+ break;
+
+ case "end":
+ // If there is agentic content to be displayed post-query response,
+ // add a line break and then append the agentic content.
+ if (agenticResponseMessagePostContent) {
+ await streamAgenticAnswer("", true);
+ await streamAgenticAnswer(agenticResponseMessagePostContent);
}
- : {
- type: "customer_reranker",
- // rnk_ prefix needed for conversion from API v1 to v2.
- rerankerId: `rnk_${rerankerId}`
- },
- contextConfiguration: {
- sentencesBefore: 2,
- sentencesAfter: 2,
- startTag: START_TAG,
- endTag: END_TAG
+ setIsStreamingResponse(false);
+
+ // If the agentic configuration prescribes showing the user some actions they can take, add them now.
+ setUserActionOptions(agenticResponseActionConfiguration?.userActionOptions ?? null);
+
+ break;
}
- },
+ };
- chat: { store: true, conversationId: conversationId ?? undefined },
- generation: {
- promptName: summaryPromptName,
- maxUsedSearchResults: numberOfSearchResults,
- enableFactualConsistencyScore: enableFactualConsistencyScore,
- responseLanguage: language
+ const streamQueryConfig: ApiV2.StreamQueryConfig = {
+ apiKey: apiKey!,
+ customerId: customerId!,
+ query: query,
+ corpusKey: corpusKeys!,
+ search: {
+ offset: 0,
+ metadataFilter: "",
+ lexicalInterpolation: lambda,
+ reranker:
+ rerankerId === mmrRerankerId
+ ? {
+ type: "mmr",
+ diversityBias: 0
+ }
+ : {
+ type: "customer_reranker",
+ // rnk_ prefix needed for conversion from API v1 to v2.
+ rerankerId: `rnk_${rerankerId}`
+ },
+ contextConfiguration: {
+ sentencesBefore: 2,
+ sentencesAfter: 2,
+ startTag: START_TAG,
+ endTag: END_TAG
+ }
+ },
- }
- };
+ chat: { store: true, conversationId: conversationId ?? undefined },
+ generation: {
+ generationPresetName: summaryPromptName,
+ maxUsedSearchResults: numberOfSearchResults,
+ enableFactualConsistencyScore: enableFactualConsistencyScore,
+ responseLanguage: language
+ }
+ };
- await streamQueryV2({ streamQueryConfig, onStreamEvent })
+ await streamQueryV2({ streamQueryConfig, onStreamEvent, requestSource });
+ } catch (error) {
+ console.log("Summary error", error);
+ setHasError(true);
+ setIsLoading(false);
+ return;
+ }
}
- catch (error) {
- console.log("Summary error", error);
- setHasError(true);
- setIsLoading(false);
- return;
- }
- }
- else {
+ } else {
try {
- const response: ChatQueryResponse = await sendSearchRequest({
- apiKey: apiKey!,
- customerId: customerId!,
- query: query,
- corpusKeys: corpusKeys!,
- search: {
- offset: 0,
- metadataFilter: "",
- lexicalInterpolation: lambda,
- reranker: rerankerId === mmrRerankerId
- ? {
- type: "mmr",
- diversityBias: 0
- }
- : {
- type: "customer_reranker",
- // rnk_ prefix needed for conversion from API v1 to v2.
- rerankerId: `rnk_${rerankerId}`
- },
- contextConfiguration: {
- sentencesBefore: 2,
- sentencesAfter: 2,
- startTag: START_TAG,
- endTag: END_TAG
+ if (agenticResponseMessageContent) {
+ setMessageHistory((prev) => [
+ ...prev,
+ {
+ id: `agentic-query-${Date.now()}`,
+ type: "turn",
+ question: recentQuestion.current,
+ answer:
+ agenticResponseMessageContent +
+ (agenticResponseMessagePostContent ? `\n${agenticResponseMessagePostContent}` : ""),
+ results: resultsWithSnippets ?? [],
+ factualConsistencyScore: undefined
}
- },
-
- chat: {store: true, conversationId: conversationId ?? undefined},
- generation: {
- promptName: summaryPromptName,
- maxUsedSearchResults: numberOfSearchResults,
- enableFactualConsistencyScore: enableFactualConsistencyScore,
- responseLanguage: language
+ ]);
+ } else {
+ const response: ChatQueryResponse = await sendSearchRequest({
+ apiKey: apiKey!,
+ customerId: customerId!,
+ query: query,
+ corpusKeys: corpusKeys!,
+ search: {
+ offset: 0,
+ metadataFilter: "",
+ lexicalInterpolation: lambda,
+ reranker:
+ rerankerId === mmrRerankerId
+ ? {
+ type: "mmr",
+ diversityBias: 0
+ }
+ : {
+ type: "customer_reranker",
+ // rnk_ prefix needed for conversion from API v1 to v2.
+ rerankerId: `rnk_${rerankerId}`
+ },
+ contextConfiguration: {
+ sentencesBefore: 2,
+ sentencesAfter: 2,
+ startTag: START_TAG,
+ endTag: END_TAG
+ }
+ },
- }
- })
+ chat: { store: true, conversationId: conversationId ?? undefined },
+ generation: {
+ promptName: summaryPromptName,
+ maxUsedSearchResults: numberOfSearchResults,
+ enableFactualConsistencyScore: enableFactualConsistencyScore,
+ responseLanguage: language
+ }
+ });
- resultsWithSnippets = response.search_results.map((result: SearchResult) => {
- const { pre, text, post } = parseSnippet(result.text);
+ resultsWithSnippets = response.search_results.map((result: SearchResult) => {
+ const { pre, text, post } = parseSnippet(result.text);
- return {
- ...result,
- snippet: {
- pre,
- text,
- post
+ return {
+ ...result,
+ snippet: {
+ pre,
+ text,
+ post
+ }
+ };
+ });
+ setConversationId(response.chat_id);
+ setMessageHistory((prev) => [
+ ...prev,
+ {
+ id: response.chat_id,
+ type: "turn",
+ question: recentQuestion.current,
+ answer:
+ (response?.answer ?? "") +
+ (agenticResponseMessagePostContent ? `\n${agenticResponseMessagePostContent}` : ""),
+ results: resultsWithSnippets ?? [],
+ factualConsistencyScore: response.factual_consistency_score
}
- };
- });
- setConversationId(response.chat_id);
- setMessageHistory((prev) => [
- ...prev,
- {
- id: response.chat_id,
- question: recentQuestion.current,
- answer: response?.answer ?? "",
- results: resultsWithSnippets ?? [],
- factualConsistencyScore: response.factual_consistency_score
- }
- ]);
+ ]);
+ }
+
+ setUserActionOptions(agenticResponseActionConfiguration?.userActionOptions ?? null);
+
setActiveMessage(null);
setIsLoading(false);
} catch (error) {
@@ -269,7 +400,6 @@ export const useChat = ({
setIsLoading(false);
return;
}
-
}
};
@@ -278,6 +408,46 @@ export const useChat = ({
setConversationId(null);
};
+ // Given a string, adds this string to the message feed as part of the chatbot's
+ // answer to the user's most recent question.
+ const streamAgenticAnswer = (message: string, addLineBreak?: boolean): Promise<void> => {
+ return new Promise<void>((resolve) => {
+ const chars = message.split("");
+
+ const activeMessageBasis: Omit<ChatTurn, "answer" | "results"> = {
+ id: "placeholder-message-id",
+ type: "turn",
+ question: recentQuestion.current,
+ factualConsistencyScore: undefined
+ };
+
+ // Start typing the message to the user.
+ const interval = window.setInterval(() => {
+ const charToAdd = chars.shift() ?? "";
+
+ setActiveMessage((prev) => {
+ return {
+ ...activeMessageBasis,
+ results: prev?.results ?? [],
+ answer: (prev?.answer ?? "") + charToAdd
+ };
+ });
+
+ if (chars.length === 0) {
+ clearInterval(interval);
+
+ if (addLineBreak)
+ setActiveMessage((prev) => ({
+ ...activeMessageBasis,
+ results: prev?.results ?? [],
+ answer: (prev?.answer ?? "") + "\n"
+ }));
+ resolve();
+ }
+ }, 10);
+ });
+ };
+
// Handle this in an effect instead of directly in the onStreamEvent callback
// because onStreamEvent doesn't have access to the latest state of activeMessage.
useEffect(() => {
@@ -287,6 +457,23 @@ export const useChat = ({
}
}, [isStreamingResponse]);
+ useEffect(() => {
+ if (userActionOptions) {
+ setMessageHistory((prev) => {
+ return [
+ ...prev,
+ {
+ id: `user-action-${Date.now().toString()}`,
+ type: "action",
+ options: userActionOptions
+ }
+ ];
+ });
+ }
+
+ setUserActionOptions(null);
+ }, [userActionOptions]);
+
return {
sendMessage,
activeMessage,