feat: initialize the project; implement basic streaming and speech recognition

2025-06-28 19:21:46 +08:00
commit d6f9cd7aed
91 changed files with 7827 additions and 0 deletions


@@ -0,0 +1,94 @@
import { defineStore } from "pinia";
import { ref, watch } from "vue";
import type { IChatWithLLMRequest, ModelInfo, ModelListInfo } from "@/interfaces";
import { ChatService } from "@/services";
export const useChatStore = defineStore("chat", () => {
// API key for the LLM backend (hard-coded for now)
const token = "sk-fkGVZBrAqvIxLjlF3b5f19EfBb63486c90Fa5a1fBd7076Ee";
// Default model
const modelInfo = ref<ModelInfo | null>(null);
// Chat message history
const historyMessages = ref<IChatWithLLMRequest["messages"]>([]);
// Whether a response is currently streaming
const completing = ref<boolean>(false);
// Number of online users
const onlineCount = ref<number>(0);
// Send the conversation to the LLM and stream the reply
const chatWithLLM = async (
request: IChatWithLLMRequest,
onProgress: (content: string) => void, // progress callback for streamed content
) => {
if (completing.value)
throw new Error("A response is already in progress");
completing.value = true; // mark the request as started
try {
await ChatService.ChatWithLLM(token, request, (content) => {
onProgress(content);
});
}
catch (error) {
console.error("请求失败:", error);
}
finally {
completing.value = false;
}
};
// Append a user message to the history
const addMessageToHistory = (message: string) => {
const content = message.trim();
if (!content)
return;
historyMessages.value.push({
role: "user",
content,
});
};
// Clear the message history
const clearHistoryMessages = () => {
historyMessages.value = [];
};
watch(historyMessages, (newVal) => {
// When the history changes, send a request if the latest message is from the user
if (newVal.length > 0) {
const lastMessage = newVal[newVal.length - 1];
if (lastMessage.role === "user" && modelInfo.value) {
chatWithLLM({
messages: newVal,
model: modelInfo.value?.model_id,
}, (content) => {
// On each progress tick, ensure the last message is an assistant placeholder, then update its content
if (
historyMessages.value.length === 0
|| historyMessages.value[historyMessages.value.length - 1].role !== "assistant"
) {
historyMessages.value.push({
role: "assistant",
content: "",
});
}
historyMessages.value[historyMessages.value.length - 1].content = content;
});
}
}
}, { deep: true });
// Available model list
const modelList = ref<ModelListInfo[]>([]);
// Fetch the model list
const getModelList = async () => {
try {
const response = await ChatService.GetModelList();
modelList.value = response.data.data;
}
catch (error) {
console.error("获取模型列表失败:", error);
}
};
return { token, completing, chatWithLLM, historyMessages, addMessageToHistory, clearHistoryMessages, getModelList, modelList, modelInfo, onlineCount };
});
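The store delegates the actual streaming to `ChatService.ChatWithLLM`, which is not part of this diff. Below is a minimal sketch of what such a call might look like against an OpenAI-compatible chat completions endpoint, assuming the callback is invoked with the accumulated text so far (which matches how the watcher above overwrites the last assistant message on every tick). The endpoint URL and the `chatWithLLMStream` name are placeholders, not the project's actual service code.

```ts
import type { IChatWithLLMRequest } from "@/interfaces";

// Sketch only: streams an OpenAI-compatible SSE response and reports
// the accumulated content on each chunk. URL and name are placeholders.
export async function chatWithLLMStream(
  token: string,
  request: IChatWithLLMRequest,
  onProgress: (content: string) => void,
): Promise<void> {
  const response = await fetch("https://api.example.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${token}`,
    },
    body: JSON.stringify({ ...request, stream: true }),
  });
  if (!response.ok || !response.body)
    throw new Error(`Request failed with status ${response.status}`);

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let accumulated = "";
  let buffer = "";

  while (true) {
    const { done, value } = await reader.read();
    if (done)
      break;
    buffer += decoder.decode(value, { stream: true });
    // SSE frames are separated by blank lines; keep any partial frame buffered.
    const frames = buffer.split("\n\n");
    buffer = frames.pop() ?? "";
    for (const frame of frames) {
      const line = frame.trim();
      if (!line.startsWith("data:"))
        continue;
      const payload = line.slice(5).trim();
      if (payload === "[DONE]")
        return;
      const delta = JSON.parse(payload).choices?.[0]?.delta?.content ?? "";
      accumulated += delta;
      onProgress(accumulated); // report full content so far, not the delta
    }
  }
}
```

With this contract, a component only needs to call `addMessageToHistory(...)`; the deep watcher detects the new user message and streams the assistant reply into `historyMessages`.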