feat: 项目初始化、完成基本流式传输和语音识别功能

This commit is contained in:
2025-06-28 19:21:46 +08:00
commit d6f9cd7aed
91 changed files with 7827 additions and 0 deletions

154
web/src/stores/asr_store.ts Normal file
View File

@@ -0,0 +1,154 @@
import { useWebSocketStore } from "@/services";
import { convertToPCM16 } from "@/utils";
export const useAsrStore = defineStore("asr", () => {
  // Whether a recording session is currently active
  const isRecording = ref(false);
  // Recognized-text messages received from the ASR backend
  const messages = ref<string[]>([]);
  // Web Audio objects, created per recording session and released on stop
  let audioContext: AudioContext | null = null;
  let mediaStreamSource: MediaStreamAudioSourceNode | null = null;
  let workletNode: AudioWorkletNode | null = null;
  // Shared WebSocket store instance
  const webSocketStore = useWebSocketStore();
  /**
   * Send a message over the WebSocket.
   * Text goes through the store's send(); binary PCM frames are written
   * directly to the underlying socket. Silently dropped when not connected.
   * @param data text command or PCM16 audio payload
   */
  const sendMessage = (data: string | Uint8Array) => {
    if (!webSocketStore.connected)
      return;
    if (typeof data === "string") {
      webSocketStore.send(data);
    }
    else {
      webSocketStore.websocket?.send(data);
    }
  };
  // AudioWorklet processor source, inlined as a string so it can be loaded
  // from a Blob URL without shipping a separate asset file
  const audioProcessorCode = `
class AudioProcessor extends AudioWorkletProcessor {
process(inputs, outputs, parameters) {
const input = inputs[0]
if (input.length > 0) {
const inputChannel = input[0]
// 发送音频数据到主线程
this.port.postMessage({
type: 'audiodata',
data: inputChannel
})
}
return true
}
}
registerProcessor('audio-processor', AudioProcessor)
`;
  /**
   * Wait until the WebSocket reports connected, polling every 100 ms.
   * Rejects after `timeoutMs` so startRecording cannot hang forever when
   * the server is unreachable (the previous version looped indefinitely).
   */
  const waitForConnection = (timeoutMs = 10000) =>
    new Promise<void>((resolve, reject) => {
      const deadline = Date.now() + timeoutMs;
      const check = () => {
        if (webSocketStore.connected)
          resolve();
        else if (Date.now() >= deadline)
          reject(new Error("WebSocket connection timed out"));
        else setTimeout(check, 100);
      };
      check();
    });
  /**
   * Release all audio resources. Safe to call on a partially constructed
   * graph (every step is null-guarded), so it doubles as error cleanup.
   */
  const releaseAudio = () => {
    // Stop microphone tracks so the browser's recording indicator goes away
    if (mediaStreamSource?.mediaStream) {
      mediaStreamSource.mediaStream.getTracks().forEach(track => track.stop());
    }
    workletNode?.disconnect();
    mediaStreamSource?.disconnect();
    audioContext?.close().catch(() => {});
    audioContext = null;
    mediaStreamSource = null;
    workletNode = null;
  };
  /**
   * Start recording: connect the WebSocket (bounded wait), open the
   * microphone, build the 16 kHz audio graph and stream PCM16 frames.
   * On any failure all partially created resources are torn down.
   */
  const startRecording = async () => {
    if (isRecording.value)
      return;
    messages.value = [];
    // Kept at function scope so the catch block can stop the tracks even
    // if the failure happens before the stream is attached to a node
    let stream: MediaStream | null = null;
    try {
      // Ensure the WebSocket is connected before opening the microphone
      if (!webSocketStore.connected) {
        webSocketStore.connect();
        await waitForConnection();
      }
      // Acquire the microphone stream
      stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      // 16 kHz context to match the ASR backend's expected sample rate
      audioContext = new (window.AudioContext || (window as any).webkitAudioContext)({
        sampleRate: 16000,
      });
      // Load the AudioWorklet module from a Blob URL
      const blob = new Blob([audioProcessorCode], { type: "application/javascript" });
      const processorUrl = URL.createObjectURL(blob);
      try {
        await audioContext.audioWorklet.addModule(processorUrl);
      }
      finally {
        // Always release the Blob URL, even when addModule throws
        URL.revokeObjectURL(processorUrl);
      }
      mediaStreamSource = audioContext.createMediaStreamSource(stream);
      workletNode = new AudioWorkletNode(audioContext, "audio-processor", {
        numberOfInputs: 1,
        numberOfOutputs: 1,
        channelCount: 1,
      });
      // Forward each audio chunk from the worklet as 16-bit PCM
      workletNode.port.onmessage = (event) => {
        if (event.data.type === "audiodata") {
          const pcmData = convertToPCM16(event.data.data);
          sendMessage(pcmData);
        }
      };
      // Wire up the graph; connecting to destination keeps it processing
      mediaStreamSource.connect(workletNode);
      workletNode.connect(audioContext.destination);
      isRecording.value = true;
    }
    catch (err) {
      // Mic permission denied, worklet load failure or connection timeout.
      // Tear down whatever was partially constructed (previously leaked).
      console.error("需要麦克风权限才能录音", err);
      stream?.getTracks().forEach(track => track.stop());
      releaseAudio();
    }
  };
  /**
   * Stop recording: notify the backend, release the audio graph and
   * clear the recording flag after a short delay.
   */
  const stopRecording = () => {
    if (!isRecording.value)
      return;
    // Tell the backend the audio stream has ended
    sendMessage(JSON.stringify({ type: "asr_end" }));
    setTimeout(() => {
      // TODO: temporary — the flag is cleared with a delay because the
      // LLM request triggered by the final transcript is still in flight;
      // revisit once that flow is reworked
      isRecording.value = false;
    }, 300);
    // Release microphone and audio-graph resources
    releaseAudio();
  };
  return {
    isRecording,
    messages,
    startRecording,
    stopRecording,
    sendMessage,
  };
});

View File

@@ -0,0 +1,94 @@
import type { IChatWithLLMRequest, ModelInfo, ModelListInfo } from "@/interfaces";
import { ChatService } from "@/services";
export const useChatStore = defineStore("chat", () => {
  // SECURITY: hard-coded API key committed to source control. Rotate this
  // key and load it from an environment variable or a server-side proxy
  // instead of shipping it to every client.
  const token = "sk-fkGVZBrAqvIxLjlF3b5f19EfBb63486c90Fa5a1fBd7076Ee";
  // Currently selected model (null until the user picks one)
  const modelInfo = ref<ModelInfo | null>(null);
  // Conversation history sent with each completion request
  const historyMessages = ref<IChatWithLLMRequest["messages"]>([]);
  // Whether a completion request is currently in flight
  const completing = ref<boolean>(false);
  // Number of users currently online
  const onlineCount = ref<number>(0);
  /**
   * Stream a chat completion from the LLM.
   * @param request model + messages payload
   * @param onProgress called with the accumulated content on each chunk
   * @throws Error when a request is already in flight
   */
  const chatWithLLM = async (
    request: IChatWithLLMRequest,
    onProgress: (content: string) => void,
  ) => {
    if (completing.value)
      throw new Error("正在响应中");
    completing.value = true;
    try {
      await ChatService.ChatWithLLM(token, request, (content) => {
        onProgress(content);
      });
    }
    catch (error) {
      console.error("请求失败:", error);
    }
    finally {
      completing.value = false;
    }
  };
  /**
   * Append a user message to the history (ignored when blank);
   * the deep watcher below then kicks off the completion request.
   */
  const addMessageToHistory = (message: string) => {
    const content = message.trim();
    if (!content)
      return;
    historyMessages.value.push({
      role: "user",
      content,
    });
  };
  // Clear the whole conversation history
  const clearHistoryMessages = () => {
    historyMessages.value = [];
  };
  // Fire a completion whenever a new user message lands in the history.
  // The assistant placeholder pushed below also re-triggers this watcher,
  // but the role check makes that re-entry a no-op.
  watch(historyMessages, (newVal) => {
    if (newVal.length === 0)
      return;
    const lastMessage = newVal[newVal.length - 1];
    if (lastMessage.role !== "user" || !modelInfo.value)
      return;
    chatWithLLM({
      messages: newVal,
      model: modelInfo.value.model_id,
    }, (content) => {
      // Lazily append the assistant placeholder on the first chunk, then
      // keep overwriting its content with the accumulated stream
      const last = historyMessages.value[historyMessages.value.length - 1];
      if (!last || last.role !== "assistant") {
        historyMessages.value.push({
          role: "assistant",
          content: "",
        });
      }
      historyMessages.value[historyMessages.value.length - 1].content = content;
    }).catch((error) => {
      // chatWithLLM rejects when a request is already in flight; without
      // this catch the watcher produced an unhandled promise rejection
      console.error("请求失败:", error);
    });
  }, { deep: true });
  // Available models fetched from the backend
  const modelList = ref<ModelListInfo[]>([]);
  // Fetch the model list; failures are logged and leave the list unchanged
  const getModelList = async () => {
    try {
      const response = await ChatService.GetModelList();
      modelList.value = response.data.data;
    }
    catch (error) {
      console.error("获取模型列表失败:", error);
    }
  };
  return { token, completing, chatWithLLM, historyMessages, addMessageToHistory, clearHistoryMessages, getModelList, modelList, modelInfo, onlineCount };
});

2
web/src/stores/index.ts Normal file
View File

@@ -0,0 +1,2 @@
export * from "./asr_store"
export * from "./chat_store"

View File

@@ -0,0 +1,5 @@
// Placeholder user store — no user state or actions implemented yet;
// registered so other modules can depend on it before it is fleshed out.
export const useUserStore = defineStore("user", () => {
return {
};
});