Commit f5209fc

stash code
1 parent c99cd31 commit f5209fc

6 files changed (+275 -84 lines)

app/api/common.ts (+1 -4)

@@ -32,10 +32,7 @@ export async function requestOpenai(req: NextRequest) {
     authHeaderName = "Authorization";
   }
 
-  let path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
-    "/api/openai/",
-    "",
-  );
+  let path = `${req.nextUrl.pathname}`.replaceAll("/api/openai/", "");
 
   let baseUrl =
     (isAzure ? serverConfig.azureUrl : serverConfig.baseUrl) || OPENAI_BASE_URL;
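Note: the replaced code also appended req.nextUrl.search, so after this change the query string is no longer part of the rewritten path. A minimal sketch of the difference, using a hypothetical request URL:

// Hypothetical values, for illustration only.
const pathname = "/api/openai/v1/chat/completions";
const search = "?foo=bar";

// Before this commit: the query string survived the rewrite.
const oldPath = `${pathname}${search}`.replaceAll("/api/openai/", ""); // "v1/chat/completions?foo=bar"

// After this commit: only the pathname is rewritten.
const newPath = `${pathname}`.replaceAll("/api/openai/", ""); // "v1/chat/completions"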

app/client/api.ts (+9 -1)

@@ -5,7 +5,13 @@ import {
   ModelProvider,
   ServiceProvider,
 } from "../constant";
-import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
+import {
+  ChatMessageTool,
+  ChatMessage,
+  ModelType,
+  useAccessStore,
+  useChatStore,
+} from "../store";
 import { ChatGPTApi, DalleRequestPayload } from "./platforms/openai";
 import { GeminiProApi } from "./platforms/google";
 import { ClaudeApi } from "./platforms/anthropic";
@@ -56,6 +62,8 @@ export interface ChatOptions {
   onFinish: (message: string) => void;
   onError?: (err: Error) => void;
   onController?: (controller: AbortController) => void;
+  onBeforeTool?: (tool: ChatMessageTool) => void;
+  onAfterTool?: (tool: ChatMessageTool) => void;
 }
 
 export interface LLMUsage {
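Note: a minimal sketch of how a caller might wire the two new hooks. The ClientApi usage, the model name, and the content/isError fields read off the tool object are assumptions based on how this commit calls onAfterTool, not something shown in this diff:

import { ClientApi } from "./api";
import { ModelProvider } from "../constant";

// Hypothetical caller; message and config values are placeholders.
const api = new ClientApi(ModelProvider.GPT);
api.llm.chat({
  messages: [{ role: "user", content: "What's the weather in San Francisco?" }],
  config: { model: "gpt-4o", stream: true },
  onFinish(message) {
    console.log("final answer:", message);
  },
  onBeforeTool(tool) {
    console.log("running tool:", tool.function?.name);
  },
  onAfterTool(tool) {
    console.log("tool finished:", tool.function?.name, tool.isError ? "(error)" : tool.content);
  },
});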

app/client/platforms/openai.ts (+199 -77)

@@ -250,6 +250,8 @@ export class ChatGPTApi implements LLMApi {
       let responseText = "";
       let remainText = "";
       let finished = false;
+      let running = false;
+      let runTools = [];
 
       // animate response to make it looks smooth
       function animateResponseText() {
@@ -276,99 +278,219 @@
       // start animaion
       animateResponseText();
 
+      // TODO: later, the function list should be fetched from the selected plugins
+      const funcs = {
+        get_current_weather: (args) => {
+          console.log("call get_current_weather", args);
+          return "30";
+        },
+      };
       const finish = () => {
         if (!finished) {
+          console.log("try run tools", runTools.length, finished, running);
+          if (!running && runTools.length > 0) {
+            const toolCallMessage = {
+              role: "assistant",
+              tool_calls: [...runTools],
+            };
+            running = true;
+            runTools.splice(0, runTools.length); // empty runTools
+            return Promise.all(
+              toolCallMessage.tool_calls.map((tool) => {
+                options?.onBeforeTool(tool);
+                return Promise.resolve(
+                  funcs[tool.function.name](
+                    JSON.parse(tool.function.arguments),
+                  ),
+                )
+                  .then((content) => {
+                    options?.onAfterTool({
+                      ...tool,
+                      content,
+                      isError: false,
+                    });
+                    return content;
+                  })
+                  .catch((e) => {
+                    options?.onAfterTool({ ...tool, isError: true });
+                    return e.toString();
+                  })
+                  .then((content) => ({
+                    role: "tool",
+                    content,
+                    tool_call_id: tool.id,
+                  }));
+              }),
+            ).then((toolCallResult) => {
+              console.log("end runTools", toolCallMessage, toolCallResult);
+              requestPayload["messages"].splice(
+                requestPayload["messages"].length,
+                0,
+                toolCallMessage,
+                ...toolCallResult,
+              );
+              setTimeout(() => {
+                // call again
+                console.log("start again");
+                running = false;
+                chatApi(chatPath, requestPayload); // call fetchEventSource
+              }, 0);
+            });
+            console.log("try run tools", runTools.length, finished);
+            return;
+          }
+          if (running) {
+            return;
+          }
           finished = true;
           options.onFinish(responseText + remainText);
         }
       };
 
       controller.signal.onabort = finish;
 
-      fetchEventSource(chatPath, {
-        ...chatPayload,
-        async onopen(res) {
-          clearTimeout(requestTimeoutId);
-          const contentType = res.headers.get("content-type");
-          console.log(
-            "[OpenAI] request response content type: ",
-            contentType,
-          );
-
-          if (contentType?.startsWith("text/plain")) {
-            responseText = await res.clone().text();
-            return finish();
-          }
+      function chatApi(chatPath, requestPayload) {
+        const chatPayload = {
+          method: "POST",
+          body: JSON.stringify({
+            ...requestPayload,
+            // TODO: hardcoded for now; later fetch from store.tools based on what is selected in the current session
+            tools: [
+              {
+                type: "function",
+                function: {
+                  name: "get_current_weather",
+                  description: "Get the current weather",
+                  parameters: {
+                    type: "object",
+                    properties: {
+                      location: {
+                        type: "string",
+                        description:
+                          "The city and country, eg. San Francisco, USA",
+                      },
+                      format: {
+                        type: "string",
+                        enum: ["celsius", "fahrenheit"],
+                      },
+                    },
+                    required: ["location", "format"],
+                  },
+                },
+              },
+            ],
+          }),
+          signal: controller.signal,
+          headers: getHeaders(),
+        };
+        console.log("chatApi", chatPath, requestPayload, chatPayload);
+        fetchEventSource(chatPath, {
+          ...chatPayload,
+          async onopen(res) {
+            clearTimeout(requestTimeoutId);
+            const contentType = res.headers.get("content-type");
+            console.log(
+              "[OpenAI] request response content type: ",
+              contentType,
+            );
+
+            if (contentType?.startsWith("text/plain")) {
+              responseText = await res.clone().text();
+              return finish();
+            }
 
-          if (
-            !res.ok ||
-            !res.headers
-              .get("content-type")
-              ?.startsWith(EventStreamContentType) ||
-            res.status !== 200
-          ) {
-            const responseTexts = [responseText];
-            let extraInfo = await res.clone().text();
-            try {
-              const resJson = await res.clone().json();
-              extraInfo = prettyObject(resJson);
-            } catch {}
+            if (
+              !res.ok ||
+              !res.headers
+                .get("content-type")
+                ?.startsWith(EventStreamContentType) ||
+              res.status !== 200
+            ) {
+              const responseTexts = [responseText];
+              let extraInfo = await res.clone().text();
+              try {
+                const resJson = await res.clone().json();
+                extraInfo = prettyObject(resJson);
+              } catch {}
 
-            if (res.status === 401) {
-              responseTexts.push(Locale.Error.Unauthorized);
-            }
+              if (res.status === 401) {
+                responseTexts.push(Locale.Error.Unauthorized);
+              }
 
-            if (extraInfo) {
-              responseTexts.push(extraInfo);
-            }
+              if (extraInfo) {
+                responseTexts.push(extraInfo);
+              }
 
-            responseText = responseTexts.join("\n\n");
+              responseText = responseTexts.join("\n\n");
 
-            return finish();
-          }
-        },
-        onmessage(msg) {
-          if (msg.data === "[DONE]" || finished) {
-            return finish();
-          }
-          const text = msg.data;
-          try {
-            const json = JSON.parse(text);
-            const choices = json.choices as Array<{
-              delta: { content: string };
-            }>;
-            const delta = choices[0]?.delta?.content;
-            const textmoderation = json?.prompt_filter_results;
-
-            if (delta) {
-              remainText += delta;
+              return finish();
             }
-
-            if (
-              textmoderation &&
-              textmoderation.length > 0 &&
-              ServiceProvider.Azure
-            ) {
-              const contentFilterResults =
-                textmoderation[0]?.content_filter_results;
-              console.log(
-                `[${ServiceProvider.Azure}] [Text Moderation] flagged categories result:`,
-                contentFilterResults,
-              );
+          },
+          onmessage(msg) {
+            if (msg.data === "[DONE]" || finished) {
+              return finish();
             }
-          } catch (e) {
-            console.error("[Request] parse error", text, msg);
-          }
-        },
-        onclose() {
-          finish();
-        },
-        onerror(e) {
-          options.onError?.(e);
-          throw e;
-        },
-        openWhenHidden: true,
-      });
+            const text = msg.data;
+            try {
+              const json = JSON.parse(text);
+              const choices = json.choices as Array<{
+                delta: { content: string };
+              }>;
+              console.log("choices", choices);
+              const delta = choices[0]?.delta?.content;
+              const tool_calls = choices[0]?.delta?.tool_calls;
+              const textmoderation = json?.prompt_filter_results;
+
+              if (delta) {
+                remainText += delta;
+              }
+              if (tool_calls?.length > 0) {
+                const index = tool_calls[0]?.index;
+                const id = tool_calls[0]?.id;
+                const args = tool_calls[0]?.function?.arguments;
+                if (id) {
+                  runTools.push({
+                    id,
+                    type: tool_calls[0]?.type,
+                    function: {
+                      name: tool_calls[0]?.function?.name,
+                      arguments: args,
+                    },
+                  });
+                } else {
+                  runTools[index]["function"]["arguments"] += args;
+                }
+              }
+
+              console.log("runTools", runTools);
+
+              if (
+                textmoderation &&
+                textmoderation.length > 0 &&
+                ServiceProvider.Azure
+              ) {
+                const contentFilterResults =
+                  textmoderation[0]?.content_filter_results;
+                console.log(
+                  `[${ServiceProvider.Azure}] [Text Moderation] flagged categories result:`,
+                  contentFilterResults,
+                );
+              }
+            } catch (e) {
+              console.error("[Request] parse error", text, msg);
+            }
+          },
+          onclose() {
+            finish();
+          },
+          onerror(e) {
+            options.onError?.(e);
+            throw e;
+          },
+          openWhenHidden: true,
+        });
+      }
+      chatApi(chatPath, requestPayload); // call fetchEventSource
     } else {
       const res = await fetch(chatPath, chatPayload);
       clearTimeout(requestTimeoutId);
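Note: the onmessage branch above relies on how the OpenAI API streams tool calls: the first delta for a call carries id, type, and function.name, while later deltas carry only an index plus another fragment of the JSON arguments string. A standalone sketch of that accumulation, with made-up chunks:

// Shape of a streamed tool-call delta (fields per the OpenAI streaming format).
type StreamedToolCall = {
  index: number;
  id?: string;
  type?: string;
  function?: { name?: string; arguments?: string };
};

const runTools: { id: string; type?: string; function: { name?: string; arguments: string } }[] = [];

function accumulate(delta: StreamedToolCall) {
  if (delta.id) {
    // First fragment of a call: carries the id, type, and function name.
    runTools.push({
      id: delta.id,
      type: delta.type,
      function: { name: delta.function?.name, arguments: delta.function?.arguments ?? "" },
    });
  } else {
    // Later fragments: only an index plus another slice of the arguments JSON.
    runTools[delta.index].function.arguments += delta.function?.arguments ?? "";
  }
}

// Made-up stream:
accumulate({ index: 0, id: "call_abc123", type: "function", function: { name: "get_current_weather", arguments: "" } });
accumulate({ index: 0, function: { arguments: '{"location":"San ' } });
accumulate({ index: 0, function: { arguments: 'Francisco, USA","format":"celsius"}' } });
console.log(runTools[0].function.arguments); // complete JSON string, ready for JSON.parse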

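Note: once the stream ends with entries in runTools, the finish() logic above runs each local function and appends an assistant tool_calls turn plus one role "tool" reply per call to requestPayload.messages before calling chatApi again. Roughly the shapes it appends, with illustrative values:

// Illustrative message shapes for one tool round (ids and values are made up).
const requestPayload = { messages: [] as any[] /* model, stream, etc. omitted */ };

const toolCallMessage = {
  role: "assistant",
  tool_calls: [
    {
      id: "call_abc123", // id streamed by the model
      type: "function",
      function: {
        name: "get_current_weather",
        arguments: '{"location":"San Francisco, USA","format":"celsius"}',
      },
    },
  ],
};

const toolCallResult = [
  // One message per executed call, echoing the id so the model can match it.
  { role: "tool", tool_call_id: "call_abc123", content: "30" },
];

// The second request then lets the model answer using the tool output.
requestPayload.messages.push(toolCallMessage, ...toolCallResult);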
app/components/chat.module.scss (+16 -1)

@@ -413,6 +413,21 @@
     margin-top: 5px;
   }
 
+  .chat-message-tools {
+    font-size: 12px;
+    color: #aaa;
+    line-height: 1.5;
+    margin-top: 5px;
+    .chat-message-tool {
+      display: inline-flex;
+      align-items: end;
+      svg {
+        margin-left: 5px;
+        margin-right: 5px;
+      }
+    }
+  }
+
   .chat-message-item {
     box-sizing: border-box;
     max-width: 100%;
@@ -630,4 +645,4 @@
   .chat-input-send {
     bottom: 30px;
   }
-}
+}
