docs: inkeep migration for chat completion (#5193)
Committed by GitHub
Parent: 9f4f11f45b
Commit: b59e1e4bf0
docs/app/api/chat/route.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
+import { ProvideLinksToolSchema } from "@/lib/chat/inkeep-qa-schema";
+import { createOpenAICompatible } from "@ai-sdk/openai-compatible";
+import { convertToModelMessages, streamText } from "ai";
+
+export const runtime = "edge";
+
+const openai = createOpenAICompatible({
+	name: "inkeep",
+	apiKey: process.env.INKEEP_API_KEY,
+	baseURL: "https://api.inkeep.com/v1",
+});
+
+export async function POST(req: Request) {
+	const reqJson = await req.json();
+
+	const result = streamText({
+		model: openai("inkeep-qa-sonnet-4"),
+		tools: {
+			provideLinks: {
+				inputSchema: ProvideLinksToolSchema,
+			},
+		},
+		messages: convertToModelMessages(reqJson.messages, {
+			ignoreIncompleteToolCalls: true,
+		}),
+		toolChoice: "auto",
+	});
+
+	return result.toUIMessageStreamResponse();
+}
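Not part of the diff, but worth noting for anyone reproducing this locally: the route reads INKEEP_API_KEY at request time, and the AISearchTrigger component further down only renders when NEXT_PUBLIC_ENABLE_AI_CHAT is "true". A minimal environment sketch, assuming the docs app follows the usual Next.js .env.local convention (placeholder value; both variable names are taken from this commit):

INKEEP_API_KEY=your-inkeep-api-key
NEXT_PUBLIC_ENABLE_AI_CHAT=true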
@@ -10,34 +10,51 @@ import {
 	useRef,
 	useState,
 } from "react";
-import { Loader2, SearchIcon, Send, X } from "lucide-react";
+import { Loader2, RefreshCw, SearchIcon, Send, X } from "lucide-react";
 import { cn } from "@/lib/utils";
 import { buttonVariants } from "fumadocs-ui/components/ui/button";
 import Link from "fumadocs-core/link";
+import { type UIMessage, useChat, type UseChatHelpers } from "@ai-sdk/react";
+import type { ProvideLinksToolSchema } from "@/lib/chat/inkeep-qa-schema";
+import type { z } from "zod";
+import { DefaultChatTransport } from "ai";
 import { Markdown } from "./markdown";
 import { Presence } from "@radix-ui/react-presence";
-import { betterFetch } from "@better-fetch/fetch";

 const Context = createContext<{
 	open: boolean;
 	setOpen: (open: boolean) => void;
-	messages: Array<{ id: string; role: "user" | "assistant"; content: string }>;
-	isLoading: boolean;
-	sendMessage: (text: string) => void;
-	clearMessages: () => void;
+	chat: UseChatHelpers<UIMessage>;
 } | null>(null);

 function useChatContext() {
-	return use(Context)!;
+	return use(Context)!.chat;
 }

 function SearchAIActions() {
-	const { messages, isLoading, clearMessages } = useChatContext();
+	const { messages, status, setMessages, regenerate } = useChatContext();
+	const isLoading = status === "streaming";

 	if (messages.length === 0) return null;

 	return (
 		<>
+			{!isLoading && messages.at(-1)?.role === "assistant" && (
+				<button
+					type="button"
+					className={cn(
+						buttonVariants({
+							color: "secondary",
+							size: "sm",
+							className: "rounded-full gap-1.5",
+						}),
+					)}
+					onClick={() => regenerate()}
+				>
+					<RefreshCw className="size-4" />
+					Retry
+				</button>
+			)}
 			<button
 				type="button"
 				className={cn(
@@ -47,7 +64,7 @@ function SearchAIActions() {
 					className: "rounded-full",
 				}),
 			)}
-			onClick={clearMessages}
+			onClick={() => setMessages([])}
 		>
 			Clear Chat
 		</button>
@@ -55,16 +72,28 @@
 	);
 }

+const suggestions = [
+	"How do I set up authentication with Better Auth?",
+	"How to integrate Better Auth with NextJs?",
+	"How to add two-factor authentication?",
+	"How to setup SSO with Google?",
+];
+
 function SearchAIInput(props: ComponentProps<"form">) {
-	const { sendMessage, isLoading } = useChatContext();
+	const { status, sendMessage, stop, messages } = useChatContext();
 	const [input, setInput] = useState("");
+	const isLoading = status === "streaming" || status === "submitted";
+	const showSuggestions = messages.length === 0 && !isLoading;

 	const onStart = (e?: SyntheticEvent) => {
 		e?.preventDefault();
 		if (input.trim()) {
-			sendMessage(input.trim());
+			void sendMessage({ text: input });
 			setInput("");
 		}
 	};

+	const handleSuggestionClick = (suggestion: string) => {
+		setInput(suggestion);
+		void sendMessage({ text: suggestion });
+	};
+
 	useEffect(() => {
@@ -72,6 +101,7 @@ function SearchAIInput(props: ComponentProps<"form">) {
 	}, [isLoading]);

 	return (
+		<div className="flex flex-col">
 		<form
 			{...props}
 			className={cn("flex items-start pe-2", props.className)}
@@ -82,7 +112,7 @@ function SearchAIInput(props: ComponentProps<"form">) {
 			placeholder={isLoading ? "AI is answering..." : "Ask AI"}
 			autoFocus
 			className="p-4"
-			disabled={isLoading}
+			disabled={status === "streaming" || status === "submitted"}
 			onChange={(e) => {
 				setInput(e.target.value);
 			}}
@@ -92,6 +122,21 @@ function SearchAIInput(props: ComponentProps<"form">) {
 				}
 			}}
 		/>
+		{isLoading ? (
+			<button
+				key="bn"
+				type="button"
+				className={cn(
+					buttonVariants({
+						color: "secondary",
+						className: "transition-all rounded-full mt-2 gap-2",
+					}),
+				)}
+				onClick={stop}
+			>
+				<Loader2 className="size-4 animate-spin text-fd-muted-foreground" />
+			</button>
+		) : (
 		<button
 			key="bn"
 			type="submit"
@@ -101,15 +146,32 @@ function SearchAIInput(props: ComponentProps<"form">) {
 				className: "transition-all rounded-full mt-2",
 			}),
 		)}
-		disabled={input.length === 0 || isLoading}
+		disabled={input.length === 0}
 	>
-		{isLoading ? (
-			<Loader2 className="size-4 animate-spin" />
-		) : (
 		<Send className="size-4" />
-		)}
 	</button>
+	)}
 	</form>
+
+	{showSuggestions && (
+		<div className="mt-3 px-2">
+			<p className="text-xs font-medium text-fd-muted-foreground mb-2">
+				Try asking:
+			</p>
+			<div className="flex flex-wrap gap-2">
+				{suggestions.slice(0, 4).map((suggestion, i) => (
+					<button
+						key={i}
+						onClick={() => handleSuggestionClick(suggestion)}
+						className="text-xs px-3 py-1.5 bg-fd-muted/30 hover:bg-fd-muted/50 text-fd-muted-foreground hover:text-fd-foreground rounded-full border border-fd-border/50 hover:border-fd-border transition-all duration-200 text-left"
+					>
+						{suggestion}
+					</button>
+				))}
+			</div>
+		</div>
+	)}
+	</div>
 	);
 }

@@ -185,56 +247,53 @@ const roleName: Record<string, string> = {
 function Message({
 	message,
 	...props
-}: {
-	message: {
-		id: string;
-		role: "user" | "assistant";
-		content: string;
-		references?: Array<{ link: string; title: string; icon?: string }>;
-		isStreaming?: boolean;
-	};
-} & ComponentProps<"div">) {
+}: { message: UIMessage } & ComponentProps<"div">) {
+	let markdown = "";
+	let links: z.infer<typeof ProvideLinksToolSchema>["links"] = [];
+
+	for (const part of message.parts ?? []) {
+		if (part.type === "text") {
+			const textWithCitations = part.text.replace(
+				/\((\d+)\)/g,
+				'<pre className="font-mono text-xs"> ($1) </pre>',
+			);
+			markdown += textWithCitations;
+			continue;
+		}
+
+		if (part.type === "tool-provideLinks" && part.input) {
+			links = (part.input as z.infer<typeof ProvideLinksToolSchema>).links;
+		}
+	}
+
 	return (
 		<div {...props}>
 			<p
 				className={cn(
-					"mb-2 text-sm font-medium text-fd-muted-foreground",
+					"mb-1 text-sm font-medium text-fd-muted-foreground",
 					message.role === "assistant" && "text-fd-primary",
 				)}
-			></p>
+			>
+				{roleName[message.role] ?? "unknown"}
+			</p>
 			<div className="prose text-sm">
-				<Markdown text={message.content} />
-				{message.isStreaming && (
-					<span className="inline-block w-2 h-4 bg-fd-primary ml-1 animate-pulse" />
-				)}
+				<Markdown text={markdown} />
 			</div>
-			{message.references &&
-				message.references.length > 0 &&
-				!message.isStreaming && (
+			{links && links.length > 0 && (
 				<div className="mt-3 flex flex-col gap-2">
 					<p className="text-xs font-medium text-fd-muted-foreground">
 						References:
 					</p>
 					<div className="flex flex-col gap-1">
-						{message.references.map((ref, i) => (
+						{links.map((item, i) => (
 							<Link
 								key={i}
-								href={ref.link}
+								href={item.url}
 								className="flex items-center gap-2 text-xs rounded-lg border p-2 hover:bg-fd-accent hover:text-fd-accent-foreground transition-colors"
 								target="_blank"
 								rel="noopener noreferrer"
 							>
-								{ref.icon && (
-									<img
-										src={ref.icon}
-										alt=""
-										className="w-4 h-4 flex-shrink-0"
-										onError={(e) => {
-											e.currentTarget.style.display = "none";
-										}}
-									/>
-								)}
-								<span className="truncate">{ref.title}</span>
+								<span className="truncate">{item.title || item.label}</span>
 							</Link>
 						))}
 					</div>
@@ -245,254 +304,16 @@ function Message({
 }

 export function AISearchTrigger() {
 	if (process.env.NEXT_PUBLIC_ENABLE_AI_CHAT !== "true") {
 		return null;
 	}
 	const [open, setOpen] = useState(false);
-	const [messages, setMessages] = useState<
-		Array<{
-			id: string;
-			role: "user" | "assistant";
-			content: string;
-			references?: Array<{ link: string; title: string; icon?: string }>;
-			isStreaming?: boolean;
-		}>
-	>([]);
-	const [isLoading, setIsLoading] = useState(false);
-	const [input, setInput] = useState("");
-	const [sessionId, setSessionId] = useState<string>("");
-	const [questionCount, setQuestionCount] = useState(0);
-
-	const streamText = (
-		messageId: string,
-		fullText: string,
-		references?: Array<{ link: string; title: string; icon?: string }>,
-	) => {
-		const words = fullText.split(" ");
-		let currentText = "";
-		let wordIndex = 0;
-
-		const streamInterval = setInterval(() => {
-			if (wordIndex < words.length) {
-				currentText += (wordIndex > 0 ? " " : "") + words[wordIndex];
-				wordIndex++;
-
-				setMessages((prev) =>
-					prev.map((msg) =>
-						msg.id === messageId
-							? {
-									...msg,
-									content: currentText,
-									isStreaming: false,
-								}
-							: msg,
-					),
-				);
-			} else {
-				setMessages((prev) =>
-					prev.map((msg) =>
-						msg.id === messageId
-							? { ...msg, isStreaming: false, references }
-							: msg,
-					),
-				);
-				clearInterval(streamInterval);
-			}
-		}, 30);
-
-		return () => clearInterval(streamInterval);
-	};
-
-	const sendMessage = async (text: string) => {
-		if (!text.trim()) return;
-
-		const userMessage = {
-			id: Date.now().toString(),
-			role: "user" as const,
-			content: text,
-		};
-
-		setMessages((prev) => [...prev, userMessage]);
-		setIsLoading(true);
-		setQuestionCount((prev) => prev + 1);
-
-		const messageId = (Date.now() + 1).toString();
-		const assistantMessage = {
-			id: messageId,
-			role: "assistant" as const,
-			content: "",
-			isStreaming: true,
-		};
-
-		setMessages((prev) => [...prev, assistantMessage]);
-
-		try {
-			const currentQuestionNumber = questionCount + 1;
-			const isFirstQuestion = currentQuestionNumber === 1;
-			const isSecondQuestion = currentQuestionNumber === 2;
-
-			if (!isFirstQuestion) {
-				const requestBody: any = {
-					question: text,
-					stream: false,
-					fetch_existing: true,
-				};
-
-				if (!isSecondQuestion && sessionId) {
-					requestBody.session_id = sessionId;
-				}
-
-				const { data, error } = await betterFetch<{
-					content?: string;
-					answer?: string;
-					response?: string;
-					session_id?: string;
-					references?: Array<{ link: string; title: string; icon?: string }>;
-					error?: string;
-				}>("/api/ai-chat", {
-					method: "POST",
-					headers: {
-						"Content-Type": "application/json",
-					},
-					body: JSON.stringify(requestBody),
+	const chat = useChat({
+		id: "search",
+		transport: new DefaultChatTransport({
+			api: "/api/chat",
+		}),
+	});
-
-				if (error) {
-					console.error("API Error Response:", error);
-					throw new Error(`HTTP ${error.status}: ${error.message}`);
-				}
-
-				let responseContent = "";
-				if (data.content) {
-					responseContent = data.content;
-				} else if (data.answer) {
-					responseContent = data.answer;
-				} else if (data.response) {
-					responseContent = data.response;
-				} else if (data.error) {
-					responseContent = data.error;
-				} else {
-					responseContent = "No response received";
-				}
-
-				const filteredReferences = data.references?.filter(
-					(ref) => !ref.link.includes("github.com"),
-				);
-
-				streamText(messageId, responseContent, filteredReferences);
-
-				if (isSecondQuestion && data.session_id) {
-					setSessionId(data.session_id);
-				}
-			} else {
-				const streamRequestBody = {
-					question: text,
-					stream: true,
-					external_user_id: "floating-ai-user",
-				};
-
-				const streamResponse = await fetch("/api/ai-chat", {
-					method: "POST",
-					headers: {
-						"Content-Type": "application/json",
-					},
-					body: JSON.stringify(streamRequestBody),
-				});
-
-				if (!streamResponse.ok) {
-					throw new Error(`HTTP error! status: ${streamResponse.status}`);
-				}
-
-				const reader = streamResponse.body?.getReader();
-				const decoder = new TextDecoder();
-				let accumulatedContent = "";
-
-				if (reader) {
-					while (true) {
-						const { done, value } = await reader.read();
-						if (done) break;
-
-						const chunk = decoder.decode(value, { stream: true });
-						accumulatedContent += chunk;
-
-						setMessages((prev) =>
-							prev.map((msg) =>
-								msg.id === messageId
-									? { ...msg, content: accumulatedContent, isStreaming: true }
-									: msg,
-							),
-						);
-					}
-				}
-
-				setMessages((prev) =>
-					prev.map((msg) =>
-						msg.id === messageId ? { ...msg, isStreaming: false } : msg,
-					),
-				);
-
-				const fetchReferencesBody = {
-					question: text,
-					stream: false,
-					fetch_existing: true,
-					external_user_id: "floating-ai-user",
-				};
-
-				const { data: referencesData } = await betterFetch<{
-					references?: Array<{ link: string; title: string; icon?: string }>;
-					session_id?: string;
-				}>("/api/ai-chat", {
-					method: "POST",
-					headers: {
-						"Content-Type": "application/json",
-					},
-					body: JSON.stringify(fetchReferencesBody),
-				});
-
-				if (
-					referencesData?.references &&
-					referencesData.references.length > 0
-				) {
-					const filteredReferences = referencesData.references.filter(
-						(ref) => !ref.link.includes("github.com"),
-					);
-
-					if (filteredReferences.length > 0) {
-						setMessages((prev) =>
-							prev.map((msg) =>
-								msg.id === messageId
-									? { ...msg, references: filteredReferences }
-									: msg,
-							),
-						);
-					}
-				}
-			}
-		} catch (error) {
-			console.error("Error sending message:", error);
-
-			setMessages((prev) => {
-				const filtered = prev.filter((msg) => msg.id !== messageId);
-				return [
-					...filtered,
-					{
-						id: messageId,
-						role: "assistant" as const,
-						content:
-							"Sorry, there was an error processing your request. Please try again.",
-					},
-				];
-			});
-		} finally {
-			setIsLoading(false);
-		}
-	};
-
-	const clearMessages = () => {
-		setMessages([]);
-		setSessionId("");
-		setQuestionCount(0);
-	};
+	const showSuggestions =
+		chat.messages.length === 0 && chat.status !== "streaming";

 	const onKeyPress = (e: KeyboardEvent) => {
 		if (e.key === "Escape" && open) {
@@ -515,24 +336,12 @@ export function AISearchTrigger() {
 	}, []);

 	return (
-		<Context
-			value={useMemo(
-				() => ({
-					messages,
-					isLoading,
-					sendMessage,
-					clearMessages,
-					open,
-					setOpen,
-				}),
-				[messages, isLoading, open],
-			)}
-		>
+		<Context value={useMemo(() => ({ chat, open, setOpen }), [chat, open])}>
 			<RemoveScroll enabled={open}>
 				<Presence present={open}>
 					<div
 						className={cn(
-							"fixed inset-0 p-2 right-(--removed-body-scroll-bar-size,0) flex flex-col pb-[8.375rem] items-center bg-fd-background/80 backdrop-blur-sm z-50",
+							"fixed inset-0 p-2 right-(--removed-body-scroll-bar-size,0) flex flex-col pb-[8.375rem] items-center bg-fd-background/80 backdrop-blur-sm z-30",
 							open ? "animate-fd-fade-in" : "animate-fd-fade-out",
 						)}
 						onClick={(e) => {
@@ -543,7 +352,6 @@
 						}}
 					>
 						<div className="sticky top-0 flex gap-2 items-center py-2 w-full max-w-[600px]">
-							<p className="text-xs flex-1 text-fd-muted-foreground"></p>
 							<button
 								aria-label="Close"
 								tabIndex={-1}
@@ -551,7 +359,7 @@
 									buttonVariants({
 										size: "icon-sm",
 										color: "secondary",
-										className: "rounded-full",
+										className: "rounded-full ml-auto",
 									}),
 								)}
 								onClick={() => setOpen(false)}
@@ -567,24 +375,20 @@
 							}}
 						>
 							<div className="flex flex-col gap-4">
-								{messages.map((item) => (
+								{chat.messages
+									.filter((msg: UIMessage) => msg.role !== "system")
+									.map((item: UIMessage) => (
 									<Message key={item.id} message={item} />
 								))}
-								{isLoading && (
-									<div className="flex items-center gap-2 text-sm text-fd-muted-foreground">
-										<Loader2 className="size-4 animate-spin" />
-										AI is thinking...
-									</div>
-								)}
 							</div>
 						</List>
 					</div>
 				</Presence>
 				<div
 					className={cn(
-						"fixed bottom-2 transition-[width,height] duration-300 ease-[cubic-bezier(0.34,1.56,0.64,1)] -translate-x-1/2 rounded-2xl border shadow-xl z-50 overflow-hidden",
+						"fixed bottom-2 transition-[width,height] duration-300 ease-[cubic-bezier(0.34,1.56,0.64,1)] -translate-x-1/2 rounded-2xl border shadow-xl overflow-hidden z-30",
 						open
-							? "w-[min(600px,90vw)] bg-fd-popover h-32"
+							? `w-[min(600px,90vw)] bg-fd-popover ${showSuggestions ? "h-48" : "h-32"}`
 							: "w-40 h-10 bg-fd-secondary text-fd-secondary-foreground shadow-fd-background",
 					)}
 					style={{
docs/lib/chat/inkeep-qa-schema.ts (new file, 47 lines)
@@ -0,0 +1,47 @@
+import { z } from "zod";
+
+const InkeepRecordTypes = z.enum([
+	"documentation",
+	"site",
+	"discourse_post",
+	"github_issue",
+	"github_discussion",
+	"stackoverflow_question",
+	"discord_forum_post",
+	"discord_message",
+	"custom_question_answer",
+]);
+
+const LinkType = z.union([InkeepRecordTypes, z.string()]);
+
+const LinkSchema = z.object({
+	label: z.string().nullish(),
+	url: z.string(),
+	title: z.string().nullish(),
+	type: LinkType.nullish(),
+	breadcrumbs: z.array(z.string()).nullish(),
+});
+
+const LinksSchema = z.array(LinkSchema).nullish();
+
+export const ProvideLinksToolSchema = z.object({
+	links: LinksSchema,
+});
+
+const KnownAnswerConfidence = z.enum([
+	"very_confident",
+	"somewhat_confident",
+	"not_confident",
+	"no_sources",
+	"other",
+]);
+
+const AnswerConfidence = z.union([KnownAnswerConfidence, z.string()]); // evolvable
+
+const AIAnnotationsToolSchema = z.object({
+	answerConfidence: AnswerConfidence,
+});
+
+export const ProvideAIAnnotationsToolSchema = z.object({
+	aiAnnotations: AIAnnotationsToolSchema,
+});
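Not part of the commit, but for orientation: ProvideLinksToolSchema is both the inputSchema the route registers for the provideLinks tool and the type the chat component narrows part.input against, so a links payload can be validated directly. A minimal TypeScript sketch (the payload object below is hypothetical):

import { z } from "zod";
import { ProvideLinksToolSchema } from "@/lib/chat/inkeep-qa-schema";

// Hypothetical payload, shaped like what the Inkeep QA model passes to provideLinks.
const payload = {
	links: [{ url: "https://www.better-auth.com/docs", title: "Docs", type: "documentation" }],
};

// parse() throws if the payload drifts from the schema; the inferred type matches
// what the Message component reads from part.input.
const parsed: z.infer<typeof ProvideLinksToolSchema> = ProvideLinksToolSchema.parse(payload);
console.log(parsed.links?.[0]?.url);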
@@ -12,6 +12,8 @@
 		"scripts:sync-orama": "node ./scripts/sync-orama.ts"
 	},
 	"dependencies": {
+		"@ai-sdk/openai-compatible": "^1.0.20",
+		"@ai-sdk/react": "^2.0.64",
 		"@better-auth/utils": "0.3.0",
 		"@better-fetch/fetch": "catalog:",
 		"@hookform/resolvers": "^5.2.1",
@@ -47,6 +49,7 @@
 		"@scalar/nextjs-api-reference": "^0.8.17",
 		"@vercel/analytics": "^1.5.0",
 		"@vercel/og": "^0.8.5",
+		"ai": "^5.0.64",
 		"class-variance-authority": "^0.7.1",
 		"clsx": "^2.1.1",
 		"cmdk": "1.1.1",
pnpm-lock.yaml (generated, 465 lines changed)
File diff suppressed because it is too large