-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathroute.ts
More file actions
146 lines (130 loc) · 4.24 KB
/
route.ts
File metadata and controls
146 lines (130 loc) · 4.24 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
import { NextRequest, NextResponse } from "next/server";
import { cookies } from "next/headers";
import OpenAI from "openai";
import { logger } from "@/lib/logger";
/**
 * POST handler: streams an AI-generated answer back to the client.
 *
 * Expects a JSON body with `prompt` (required), and optionally `context`,
 * `type`, and `conversationHistory`. When `type === "explanation_chat"`,
 * the explanation `context` and prior turns are folded into the chat;
 * otherwise a generic assistant prompt is used.
 *
 * Returns 401 when no `userId` cookie is present, 400 when `prompt` is
 * missing, 500 on unexpected failure, and otherwise a streamed
 * `text/plain` response of model deltas.
 */
export async function POST(request: NextRequest) {
  // Hoisted out of the try block so the catch block can include them in the
  // error log. (Previously `prompt` was referenced in catch while declared
  // inside try — out of scope there.)
  let userId: string | undefined;
  let prompt: string | undefined;
  let type: string | undefined;
  try {
    const cookieStore = await cookies();
    userId = cookieStore.get("userId")?.value;
    if (!userId) {
      await logger.warn("ai_generate_access_denied", {
        reason: "not_authenticated",
      });
      return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
    }

    const body = await request.json();
    prompt = body.prompt;
    type = body.type;
    const { context, conversationHistory } = body;

    // Reject unusable requests up front instead of forwarding an empty
    // prompt to the upstream model.
    if (typeof prompt !== "string" || prompt.trim() === "") {
      return NextResponse.json({ error: "Missing prompt" }, { status: 400 });
    }

    await logger.info(
      "ai_generate_request",
      {
        type,
        promptLength: prompt.length,
        contextLength: context?.length,
        conversationLength: conversationHistory?.length || 0,
        userQuestion: prompt,
      },
      userId
    );

    // DeepSeek exposes an OpenAI-compatible API, so the OpenAI SDK is reused
    // with a custom base URL.
    const openai = new OpenAI({
      baseURL: "https://api.deepseek.com",
      apiKey: process.env.DEEPSEEK_API_KEY,
    });

    let messages: OpenAI.ChatCompletionMessageParam[] = [];

    if (type === "explanation_chat") {
      // Tutoring mode: embed the explanation the student is reading in the
      // system prompt so follow-up questions have context.
      const systemPrompt = `You are a helpful AI coding tutor. A student is reading through an explanation and has asked a question about a specific part. Your goal is to:
1. Provide clear, helpful explanations
2. Use simple language and examples when needed
3. Build on what they're already learning
4. Don't give away answers to coding problems directly, but guide their understanding
5. Be encouraging and supportive
6. Keep responses concise but thorough
7. Remember the conversation history and build upon previous questions/answers
The student is currently reading this explanation context:
${context}`;

      messages = [{ role: "system", content: systemPrompt }];

      // Replay prior turns; some clients send role "ai" rather than the
      // OpenAI-standard "assistant". Unknown roles are dropped.
      if (Array.isArray(conversationHistory)) {
        for (const msg of conversationHistory) {
          const role = msg.role === "ai" ? "assistant" : msg.role;
          if (role === "user" || role === "assistant") {
            messages.push({ role, content: msg.message });
          }
        }
      }

      // The current question goes last.
      messages.push({ role: "user", content: prompt });
    } else {
      // Default behavior for other types.
      messages = [
        { role: "system", content: "You are a helpful assistant." },
        { role: "user", content: prompt },
      ];
    }

    // Enable streaming from DeepSeek/OpenAI.
    const completion = await openai.chat.completions.create({
      model: "deepseek-chat",
      messages,
      temperature: 0.7,
      max_tokens: 500,
      stream: true,
    });

    // Pipe model deltas to the client as they arrive. Fetch Response bodies
    // must carry bytes (Uint8Array), not strings, so deltas are encoded.
    const encoder = new TextEncoder();
    const loggedPrompt = prompt;
    const loggedType = type;
    const loggedUserId = userId;
    const stream = new ReadableStream<Uint8Array>({
      async start(controller) {
        let fullContent = "";
        try {
          for await (const chunk of completion) {
            const delta = chunk.choices?.[0]?.delta?.content ?? "";
            if (delta) {
              controller.enqueue(encoder.encode(delta));
              fullContent += delta;
            }
          }
          controller.close();
        } catch (streamError) {
          // Propagate mid-stream failures so the client connection errors
          // out instead of hanging open.
          controller.error(streamError);
          return;
        }
        // Log the full response only after streaming completes.
        await logger.info(
          "ai_generate_success",
          {
            type: loggedType,
            userQuestion: loggedPrompt,
            aiResponse: fullContent,
            responseLength: fullContent.length,
            conversationLength: conversationHistory?.length || 0,
          },
          loggedUserId
        );
      },
    });

    // NOTE: Transfer-Encoding is a forbidden header on a fetch Response and
    // is managed by the runtime for streamed bodies, so it is not set here.
    return new NextResponse(stream, {
      headers: {
        "Content-Type": "text/plain; charset=utf-8",
        "Cache-Control": "no-cache",
      },
    });
  } catch (error: unknown) {
    // Narrow the unknown error before reading .message.
    const message = error instanceof Error ? error.message : String(error);
    await logger.error(
      "ai_generate_error",
      {
        error: message,
        userQuestion: prompt,
      },
      userId
    );
    console.error("Error:", error);
    return NextResponse.json(
      { error: `Error occurred: ${message}` },
      { status: 500 }
    );
  }
}