feat: introduce Prettier
@@ -1,8 +1,13 @@
-import type { IChatWithLLMRequest, ModelInfo, ModelListInfo, UsageInfo } from "@/interfaces";
+import type {
+  IChatWithLLMRequest,
+  ModelInfo,
+  ModelListInfo,
+  UsageInfo
+} from "@/interfaces";
 import { ChatService } from "@/services";
 
 export const useChatStore = defineStore("chat", () => {
-  const token = ("sk-fkGVZBrAqvIxLjlF3b5f19EfBb63486c90Fa5a1fBd7076Ee");
+  const token = "sk-fkGVZBrAqvIxLjlF3b5f19EfBb63486c90Fa5a1fBd7076Ee";
   // 默认模型
   const modelInfo = ref<ModelInfo | null>(null);
   // 历史消息
@@ -16,23 +21,25 @@ export const useChatStore = defineStore("chat", () => {
   const chatWithLLM = async (
     request: IChatWithLLMRequest,
     onProgress: (content: string) => void, // 接收进度回调
-    getUsageInfo: (object: UsageInfo) => void = () => { },
+    getUsageInfo: (object: UsageInfo) => void = () => {}
   ) => {
-    if (completing.value)
-      throw new Error("正在响应中");
+    if (completing.value) throw new Error("正在响应中");
 
     completing.value = true; // 开始请求
     try {
-      await ChatService.ChatWithLLM(token, request, (content) => {
-        onProgress(content);
-      }, (object: UsageInfo) => {
-        getUsageInfo(object);
-      });
-    }
-    catch (error) {
+      await ChatService.ChatWithLLM(
+        token,
+        request,
+        (content) => {
+          onProgress(content);
+        },
+        (object: UsageInfo) => {
+          getUsageInfo(object);
+        }
+      );
+    } catch (error) {
       console.error("请求失败:", error);
-    }
-    finally {
+    } finally {
       completing.value = false;
     }
   };
@@ -40,12 +47,11 @@ export const useChatStore = defineStore("chat", () => {
   // 添加消息到历史记录
   const addMessageToHistory = (message: string) => {
     const content = message.trim();
-    if (!content)
-      return;
+    if (!content) return;
 
     historyMessages.value.push({
       role: "user",
-      content,
+      content
     });
   };
 
@@ -54,39 +60,51 @@ export const useChatStore = defineStore("chat", () => {
     historyMessages.value = [];
   };
 
-  watch(historyMessages, (newVal) => {
-    // 当历史消息变化时,发送请求
-    if (newVal.length > 0) {
-      const lastMessage = newVal[newVal.length - 1];
-      if (lastMessage.role === "user" && modelInfo.value) {
-        chatWithLLM({
-          messages: newVal,
-          model: modelInfo.value?.model_id,
-        }, (content) => {
-          // 处理进度回调
-          if (
-            historyMessages.value.length === 0
-            || historyMessages.value[historyMessages.value.length - 1].role !== "assistant"
-          ) {
-            historyMessages.value.push({
-              role: "assistant",
-              content: "",
-            });
-          }
-          historyMessages.value[historyMessages.value.length - 1].content = content;
-        }, (usageInfo: UsageInfo) => {
-          // 处理使用usage信息回调
-          // 如果最后一条消息是助手的回复,则更新使用信息
-          if (
-            historyMessages.value.length > 0
-            && historyMessages.value[historyMessages.value.length - 1].role === "assistant"
-          ) {
-            historyMessages.value[historyMessages.value.length - 1].usage = usageInfo;
-          }
-        });
+  watch(
+    historyMessages,
+    (newVal) => {
+      // 当历史消息变化时,发送请求
+      if (newVal.length > 0) {
+        const lastMessage = newVal[newVal.length - 1];
+        if (lastMessage.role === "user" && modelInfo.value) {
+          chatWithLLM(
+            {
+              messages: newVal,
+              model: modelInfo.value?.model_id
+            },
+            (content) => {
+              // 处理进度回调
+              if (
+                historyMessages.value.length === 0 ||
+                historyMessages.value[historyMessages.value.length - 1].role !==
+                  "assistant"
+              ) {
+                historyMessages.value.push({
+                  role: "assistant",
+                  content: ""
+                });
+              }
+              historyMessages.value[historyMessages.value.length - 1].content =
+                content;
+            },
+            (usageInfo: UsageInfo) => {
+              // 处理使用usage信息回调
+              // 如果最后一条消息是助手的回复,则更新使用信息
+              if (
+                historyMessages.value.length > 0 &&
+                historyMessages.value[historyMessages.value.length - 1].role ===
+                  "assistant"
+              ) {
+                historyMessages.value[historyMessages.value.length - 1].usage =
+                  usageInfo;
+              }
+            }
+          );
+        }
       }
-    }
-  }, { deep: true });
+    },
+    { deep: true }
+  );
 
   // 模型列表
   const modelList = ref<ModelListInfo[]>([]);
@@ -96,11 +114,21 @@ export const useChatStore = defineStore("chat", () => {
     try {
       const response = await ChatService.GetModelList();
       modelList.value = response.data.data;
-    }
-    catch (error) {
+    } catch (error) {
       console.error("获取模型列表失败:", error);
     }
   };
 
-  return { token, completing, chatWithLLM, historyMessages, addMessageToHistory, clearHistoryMessages, getModelList, modelList, modelInfo, onlineCount };
+  return {
+    token,
+    completing,
+    chatWithLLM,
+    historyMessages,
+    addMessageToHistory,
+    clearHistoryMessages,
+    getModelList,
+    modelList,
+    modelInfo,
+    onlineCount
+  };
 });
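
The hunks above only show the store after reformatting; the Prettier setup itself is not part of this diff. Judging from the output (double quotes and semicolons kept, trailing commas removed, long expressions wrapped near 80 columns), a configuration roughly like the following would reproduce this style. The file name and every option value here are inferred, not taken from the commit.

// prettier.config.mjs (assumed; the actual config file is not shown in this commit)
export default {
  semi: true, // statements keep their semicolons, as in the reformatted store
  singleQuote: false, // string literals stay double-quoted ("chat", "user", "assistant")
  trailingComma: "none", // matches the removed trailing commas (e.g. `content,` becomes `content`)
  printWidth: 80, // long member chains are wrapped, e.g. the `.content =` assignment
  tabWidth: 2 // two-space indentation throughout the diff
};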
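
For orientation, the members listed in the reformatted return block would typically be consumed from a component along these lines. Only the store's member names come from the diff; the import path, the top-level usage and the example message are assumptions.

// Hypothetical consumer of useChatStore; the import path is an assumption.
import { useChatStore } from "@/stores/chat";

const chat = useChatStore();

await chat.getModelList(); // fills modelList via ChatService.GetModelList()
// ...the UI is assumed to set chat.modelInfo once the user picks an entry from chat.modelList...

chat.addMessageToHistory("hello"); // pushes a { role: "user" } message
// The deep watch in the store then sees a trailing user message, calls chatWithLLM,
// and streams the assistant reply into the last entry of chat.historyMessages.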