diff --git a/app/(chat)/api/chat/route.ts b/app/(chat)/api/chat/route.ts
index ac52197..95e137a 100644
--- a/app/(chat)/api/chat/route.ts
+++ b/app/(chat)/api/chat/route.ts
@@ -11,6 +11,7 @@ import { checkBotId } from "botid/server";
 import { after } from "next/server";
 import { createResumableStreamContext } from "resumable-stream";
 import { auth, type UserType } from "@/app/(auth)/auth";
+import { fetchDocsContent } from "@/lib/ai/docs";
 import { entitlementsByUserType } from "@/lib/ai/entitlements";
 import {
   allowedModelIds,
@@ -186,14 +187,17 @@ export async function POST(request: Request) {
   const isReasoningModel = capabilities?.reasoning === true;
   const supportsTools = capabilities?.tools === true;
 
-  const modelMessages = await convertToModelMessages(uiMessages);
+  const [modelMessages, docsContent] = await Promise.all([
+    convertToModelMessages(uiMessages),
+    fetchDocsContent(),
+  ]);
 
   const stream = createUIMessageStream({
     originalMessages: isToolApprovalFlow ? uiMessages : undefined,
     execute: async ({ writer: dataStream }) => {
       const result = streamText({
         model: getLanguageModel(chatModel),
-        system: systemPrompt({ requestHints, supportsTools }),
+        system: systemPrompt({ requestHints, supportsTools, docsContent }),
         messages: modelMessages,
         stopWhen: stepCountIs(5),
         experimental_activeTools:
diff --git a/app/api/widget-chat/route.ts b/app/api/widget-chat/route.ts
new file mode 100644
index 0000000..1253ac2
--- /dev/null
+++ b/app/api/widget-chat/route.ts
@@ -0,0 +1,114 @@
+import { streamText } from "ai";
+import { fetchDocsContent } from "@/lib/ai/docs";
+import { DEFAULT_CHAT_MODEL } from "@/lib/ai/models";
+import { regularPrompt } from "@/lib/ai/prompts";
+import { getLanguageModel } from "@/lib/ai/providers";
+
+export const maxDuration = 30;
+
+const ALLOWED_ORIGINS = ["https://www.devdocify.com", "https://devdocify.com"];
+
+// Cap the body size of this public, unauthenticated route.
+const MAX_MESSAGES = 50;
+
+type WidgetMessage = {
+  role: "user" | "assistant" | "system";
+  content: string;
+};
+
+// Exact-host check for local development. A plain
+// origin.startsWith("http://localhost") would also match hostile origins
+// such as "http://localhost.evil.com".
+function isLocalhostOrigin(origin: string): boolean {
+  try {
+    const { protocol, hostname } = new URL(origin);
+    return (
+      protocol === "http:" &&
+      (hostname === "localhost" || hostname === "127.0.0.1")
+    );
+  } catch {
+    return false;
+  }
+}
+
+function corsHeaders(origin: string | null): Record<string, string> {
+  const allowed =
+    origin !== null &&
+    (ALLOWED_ORIGINS.includes(origin) || isLocalhostOrigin(origin));
+  return {
+    // The allowed value varies by request, so caches must key on Origin.
+    Vary: "Origin",
+    "Access-Control-Allow-Origin":
+      allowed && origin !== null ? origin : ALLOWED_ORIGINS[0],
+    "Access-Control-Allow-Methods": "POST, OPTIONS",
+    "Access-Control-Allow-Headers": "Content-Type",
+  };
+}
+
+// Runtime shape-check of the untrusted request body; narrows to
+// WidgetMessage[] without resorting to `as any`.
+function isValidMessages(value: unknown): value is WidgetMessage[] {
+  if (
+    !Array.isArray(value) ||
+    value.length === 0 ||
+    value.length > MAX_MESSAGES
+  ) {
+    return false;
+  }
+  return value.every((m) => {
+    if (typeof m !== "object" || m === null) {
+      return false;
+    }
+    const { role, content } = m as { role?: unknown; content?: unknown };
+    return (
+      (role === "user" || role === "assistant" || role === "system") &&
+      typeof content === "string"
+    );
+  });
+}
+
+export function OPTIONS(request: Request) {
+  return new Response(null, {
+    status: 204,
+    headers: corsHeaders(request.headers.get("origin")),
+  });
+}
+
+export async function POST(request: Request) {
+  const origin = request.headers.get("origin");
+  const headers = corsHeaders(origin);
+
+  let messages: WidgetMessage[];
+  try {
+    const body = (await request.json()) as { messages?: unknown };
+    if (!isValidMessages(body.messages)) {
+      return new Response("Bad request", { status: 400, headers });
+    }
+    messages = body.messages;
+  } catch {
+    return new Response("Bad request", { status: 400, headers });
+  }
+
+  const docsContent = await fetchDocsContent();
+  const docsSection = docsContent
+    ? `\n\n## DevDocify Documentation\n\n${docsContent}`
+    : "";
+  const system = `${regularPrompt}${docsSection}`;
+
+  const result = streamText({
+    model: getLanguageModel(DEFAULT_CHAT_MODEL),
+    system,
+    messages,
+  });
+
+  // Re-emit the stream with CORS headers merged onto the SDK's response.
+  const textResponse = result.toTextStreamResponse();
+  const responseHeaders = new Headers(textResponse.headers);
+  for (const [k, v] of Object.entries(headers)) {
+    responseHeaders.set(k, v);
+  }
+  return new Response(textResponse.body, {
+    status: textResponse.status,
+    headers: responseHeaders,
+  });
+}
diff --git a/lib/ai/docs.ts b/lib/ai/docs.ts
new file mode 100644
index 0000000..d206e01
--- /dev/null
+++ b/lib/ai/docs.ts
@@ -0,0 +1,26 @@
+const LLMS_TXT_URL = "https://www.devdocify.com/llms.txt";
+const CACHE_TTL = 60 * 60 * 1000; // 1 hour
+
+// Module-level in-memory cache; survives across requests within a server
+// process, in addition to Next's fetch-level revalidation below.
+let cache: { content: string; fetchedAt: number } | null = null;
+
+// Fetches the llms.txt docs bundle, caching it for CACHE_TTL. On a fetch
+// failure, serves the stale cached copy (if any) rather than dropping docs.
+export async function fetchDocsContent(): Promise<string | undefined> {
+  const now = Date.now();
+  if (cache && now - cache.fetchedAt < CACHE_TTL) {
+    return cache.content;
+  }
+  try {
+    const res = await fetch(LLMS_TXT_URL, { next: { revalidate: 3600 } });
+    if (!res.ok) {
+      return cache?.content; // stale-if-error fallback
+    }
+    const content = await res.text();
+    cache = { content, fetchedAt: now };
+    return content;
+  } catch {
+    return cache?.content; // stale-if-error fallback
+  }
+}
diff --git a/lib/ai/prompts.ts b/lib/ai/prompts.ts
index 77d0d28..95f34e0 100644
--- a/lib/ai/prompts.ts
+++ b/lib/ai/prompts.ts
@@ -44,7 +44,7 @@ CRITICAL RULES:
 - ONLY when the user explicitly asks for suggestions on an existing document
 `;
 
-export const regularPrompt = `You are a helpful assistant. Keep responses concise and direct.
+export const regularPrompt = `You are a helpful assistant for DevDocify, a documentation platform. Answer questions about DevDocify's features, configuration, and usage using the provided documentation. For questions unrelated to DevDocify, you can still help as a general assistant. Keep responses concise and direct.
 
 When asked to write, create, or build something, do it immediately. Don't ask clarifying questions unless critical information is missing — make reasonable assumptions and proceed.`;
 
@@ -66,17 +66,22 @@ About the origin of user's request:
 export const systemPrompt = ({
   requestHints,
   supportsTools,
+  docsContent,
 }: {
   requestHints: RequestHints;
   supportsTools: boolean;
+  docsContent?: string;
 }) => {
   const requestPrompt = getRequestPromptFromHints(requestHints);
+  const docsSection = docsContent
+    ? `\n\n## DevDocify Documentation\n\n${docsContent}`
+    : "";
 
   if (!supportsTools) {
-    return `${regularPrompt}\n\n${requestPrompt}`;
+    return `${regularPrompt}${docsSection}\n\n${requestPrompt}`;
   }
 
-  return `${regularPrompt}\n\n${requestPrompt}\n\n${artifactsPrompt}`;
+  return `${regularPrompt}${docsSection}\n\n${requestPrompt}\n\n${artifactsPrompt}`;
 };
 
 export const codePrompt = `