feat: 项目初始化、完成基本流式传输和语音识别功能
This commit is contained in:
0
backend/app/services/__init__.py
Normal file
0
backend/app/services/__init__.py
Normal file
BIN
backend/app/services/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
backend/app/services/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
backend/app/services/__pycache__/llm_request.cpython-310.pyc
Normal file
BIN
backend/app/services/__pycache__/llm_request.cpython-310.pyc
Normal file
Binary file not shown.
20
backend/app/services/llm_request.py
Normal file
20
backend/app/services/llm_request.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import httpx
|
||||
from typing import Callable, Awaitable, Optional
|
||||
|
||||
|
||||
# Streaming POST helper for talking to an LLM backend.
async def stream_post_request(
    url,
    headers=None,
    json=None,
    chunk_handler: Optional[Callable[[bytes], Awaitable[bytes]]] = None
):
    """Issue a streaming HTTP/2 POST and yield the response body chunk by chunk.

    Args:
        url: Target endpoint URL.
        headers: Optional mapping of HTTP headers to send.
        json: Optional JSON-serializable request payload (mirrors httpx's
            ``json=`` keyword; the name shadows the stdlib module only
            inside this function's scope).
        chunk_handler: Optional async callback invoked on every raw chunk;
            its awaited return value is yielded in place of the raw bytes.

    Yields:
        bytes: Each chunk of the response body, transformed by
        ``chunk_handler`` when one is supplied.
    """
    # A single combined async-with: the client is bound first, then used
    # to open the streaming request — entered and closed in LIFO order.
    async with httpx.AsyncClient(http2=True) as client, client.stream(
        method="POST", url=url, headers=headers, json=json
    ) as response:
        async for raw in response.aiter_bytes():
            if chunk_handler:
                # Support asynchronous per-chunk processing.
                yield await chunk_handler(raw)
            else:
                yield raw
|
||||
Reference in New Issue
Block a user