Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions app/(chat)/api/chat/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { checkBotId } from "botid/server";
import { after } from "next/server";
import { createResumableStreamContext } from "resumable-stream";
import { auth, type UserType } from "@/app/(auth)/auth";
import { fetchDocsContent } from "@/lib/ai/docs";
import { entitlementsByUserType } from "@/lib/ai/entitlements";
import {
allowedModelIds,
Expand Down Expand Up @@ -186,14 +187,17 @@ export async function POST(request: Request) {
const isReasoningModel = capabilities?.reasoning === true;
const supportsTools = capabilities?.tools === true;

const modelMessages = await convertToModelMessages(uiMessages);
const [modelMessages, docsContent] = await Promise.all([
convertToModelMessages(uiMessages),
fetchDocsContent(),
]);

const stream = createUIMessageStream({
originalMessages: isToolApprovalFlow ? uiMessages : undefined,
execute: async ({ writer: dataStream }) => {
const result = streamText({
model: getLanguageModel(chatModel),
system: systemPrompt({ requestHints, supportsTools }),
system: systemPrompt({ requestHints, supportsTools, docsContent }),
messages: modelMessages,
stopWhen: stepCountIs(5),
experimental_activeTools:
Expand Down
67 changes: 67 additions & 0 deletions app/api/widget-chat/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
import { streamText } from "ai";
import { fetchDocsContent } from "@/lib/ai/docs";
import { DEFAULT_CHAT_MODEL } from "@/lib/ai/models";
import { regularPrompt } from "@/lib/ai/prompts";
import { getLanguageModel } from "@/lib/ai/providers";

// Vercel serverless function timeout (seconds) for this route.
export const maxDuration = 30;

// Origins allowed to call this widget endpoint cross-origin.
const ALLOWED_ORIGINS = ["https://www.devdocify.com", "https://devdocify.com"];

/**
 * Builds the CORS response headers for this route.
 *
 * The request origin is echoed back only when it is in the allow-list or is
 * a genuine local-development origin; otherwise the primary production
 * origin is returned (the browser will then refuse the cross-origin read).
 *
 * @param origin - Value of the request's `Origin` header, or null if absent.
 * @returns Header map to attach to every response from this route.
 */
function corsHeaders(origin: string | null): Record<string, string> {
  // SECURITY: a bare startsWith("http://localhost") check would also match
  // hostile origins such as "http://localhost.evil.com", so parse the origin
  // and compare the hostname exactly.
  let allowed = origin !== null && ALLOWED_ORIGINS.includes(origin);
  if (!allowed && origin !== null) {
    try {
      const { protocol, hostname } = new URL(origin);
      allowed =
        protocol === "http:" &&
        (hostname === "localhost" || hostname === "127.0.0.1");
    } catch {
      allowed = false; // malformed Origin header → not allowed
    }
  }
  return {
    "Access-Control-Allow-Origin": allowed
      ? (origin ?? ALLOWED_ORIGINS[0])
      : ALLOWED_ORIGINS[0],
    "Access-Control-Allow-Methods": "POST, OPTIONS",
    "Access-Control-Allow-Headers": "Content-Type",
    // The allow-origin value depends on the request; keep shared caches from
    // serving one origin's headers to another.
    Vary: "Origin",
  };
}

/**
 * CORS preflight handler: replies 204 No Content carrying the allow headers
 * computed for the caller's origin.
 */
export function OPTIONS(request: Request) {
  const origin = request.headers.get("origin");
  const headers = corsHeaders(origin);
  return new Response(null, { status: 204, headers });
}

/**
 * Widget chat endpoint: streams a plain-text completion grounded in the
 * DevDocify docs. This route is unauthenticated, so the payload is strictly
 * validated and size-bounded before reaching the model.
 *
 * Expects a JSON body of `{ messages: { role, content }[] }` and returns the
 * model output as a text stream with CORS headers attached.
 */
export async function POST(request: Request) {
  const origin = request.headers.get("origin");
  const headers = corsHeaders(origin);

  // Bounds for the unauthenticated endpoint: cap conversation length and
  // per-message size so oversized prompts can't be relayed for free.
  const MAX_MESSAGES = 50;
  const MAX_CONTENT_LENGTH = 8_000;

  let messages: { role: "user" | "assistant"; content: string }[];
  try {
    const body: unknown = await request.json();
    const raw = (body as { messages?: unknown }).messages;
    if (!Array.isArray(raw) || raw.length === 0 || raw.length > MAX_MESSAGES) {
      return new Response("Bad request", { status: 400, headers });
    }
    // Only user/assistant turns are accepted — rejecting "system" here keeps
    // callers from injecting their own system prompt.
    const valid = raw.every(
      (m): m is { role: "user" | "assistant"; content: string } =>
        typeof m === "object" &&
        m !== null &&
        ((m as { role?: unknown }).role === "user" ||
          (m as { role?: unknown }).role === "assistant") &&
        typeof (m as { content?: unknown }).content === "string" &&
        (m as { content: string }).content.length <= MAX_CONTENT_LENGTH
    );
    if (!valid) {
      return new Response("Bad request", { status: 400, headers });
    }
    messages = raw;
  } catch {
    return new Response("Bad request", { status: 400, headers });
  }

  // Ground the system prompt in the docs when available; a failed docs fetch
  // degrades to the bare prompt rather than an error.
  const docsContent = await fetchDocsContent();
  const docsSection = docsContent
    ? `\n\n## DevDocify Documentation\n\n${docsContent}`
    : "";
  const system = `${regularPrompt}${docsSection}`;

  // Messages are now a properly narrowed type — no `as any` needed.
  const result = streamText({
    model: getLanguageModel(DEFAULT_CHAT_MODEL),
    system,
    messages,
  });

  // Re-wrap the streaming response so the CORS headers can be merged onto it.
  const textResponse = result.toTextStreamResponse();
  const responseHeaders = new Headers(textResponse.headers);
  for (const [k, v] of Object.entries(headers)) {
    responseHeaders.set(k, v);
  }
  return new Response(textResponse.body, {
    status: textResponse.status,
    headers: responseHeaders,
  });
}
22 changes: 22 additions & 0 deletions lib/ai/docs.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
const LLMS_TXT_URL = "https://www.devdocify.com/llms.txt";
const CACHE_TTL = 60 * 60 * 1000; // 1 hour
const FETCH_TIMEOUT_MS = 10_000; // bound a hung upstream; see below

// Module-level cache; lives for the lifetime of the serverless instance, on
// top of Next.js's own fetch-level revalidation.
let cache: { content: string; fetchedAt: number } | null = null;

/**
 * Fetches the DevDocify llms.txt docs bundle, caching it in memory for
 * CACHE_TTL.
 *
 * Failure modes are soft: on a non-OK response, a timeout, or a network
 * error, any previously cached (possibly expired) content is served; with no
 * cache at all, undefined is returned and callers simply omit the docs
 * section from the prompt.
 *
 * @returns The docs text, stale docs text, or undefined.
 */
export async function fetchDocsContent(): Promise<string | undefined> {
  const now = Date.now();
  if (cache && now - cache.fetchedAt < CACHE_TTL) {
    return cache.content;
  }
  try {
    const res = await fetch(LLMS_TXT_URL, {
      next: { revalidate: 3600 },
      // Without a timeout, a slow docs host would stall every chat request
      // that misses the cache; degrade to stale/absent docs instead.
      signal: AbortSignal.timeout(FETCH_TIMEOUT_MS),
    });
    if (!res.ok) {
      return cache?.content; // stale-if-error: better than no docs
    }
    const content = await res.text();
    cache = { content, fetchedAt: now };
    return content;
  } catch {
    return cache?.content; // network error/timeout → serve stale if we have it
  }
}
11 changes: 8 additions & 3 deletions lib/ai/prompts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ CRITICAL RULES:
- ONLY when the user explicitly asks for suggestions on an existing document
`;

export const regularPrompt = `You are a helpful assistant. Keep responses concise and direct.
export const regularPrompt = `You are a helpful assistant for DevDocify, a documentation platform. Answer questions about DevDocify's features, configuration, and usage using the provided documentation. For questions unrelated to DevDocify, you can still help as a general assistant. Keep responses concise and direct.

When asked to write, create, or build something, do it immediately. Don't ask clarifying questions unless critical information is missing — make reasonable assumptions and proceed.`;

Expand All @@ -66,17 +66,22 @@ About the origin of user's request:
/**
 * Assembles the system prompt for the main chat route.
 *
 * Layers, in order: the base assistant prompt, the fetched DevDocify docs
 * (when available), per-request hints, and — only for tool-capable models —
 * the artifacts tooling instructions. (The stale pre-diff return lines that
 * were pasted inside this body are removed; each branch returns once.)
 *
 * @param requestHints - Geo/origin hints for the current request.
 * @param supportsTools - Whether the selected model can call tools.
 * @param docsContent - Raw llms.txt docs text, if the fetch succeeded.
 */
export const systemPrompt = ({
  requestHints,
  supportsTools,
  docsContent,
}: {
  requestHints: RequestHints;
  supportsTools: boolean;
  docsContent?: string;
}) => {
  const requestPrompt = getRequestPromptFromHints(requestHints);
  const docsSection = docsContent
    ? `\n\n## DevDocify Documentation\n\n${docsContent}`
    : "";

  if (!supportsTools) {
    return `${regularPrompt}${docsSection}\n\n${requestPrompt}`;
  }

  return `${regularPrompt}${docsSection}\n\n${requestPrompt}\n\n${artifactsPrompt}`;
};

export const codePrompt = `
Expand Down
Loading