feat: simplify

commit 11fcec9387
parent 16263710d9
Date: 2026-01-16 18:34:38 +08:00

137 changed files with 68993 additions and 6435 deletions


@@ -37,7 +37,6 @@ from open_webui.utils.payload import (
     apply_model_params_to_body_openai,
     apply_system_prompt_to_body,
 )
-from open_webui.utils.credit.usage import CreditDeduct
 
 logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
 log = logging.getLogger(__name__)
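The dropped import above is the last reference to the credit module in this file. For context, here is a minimal sketch of the interface the deleted call sites imply — the constructor arguments, run(), usage_message, and add_usage_to_resp() all appear in the hunks below, while the method bodies (token counting, balance settlement) are assumptions, not the real open_webui.utils.credit.usage code.

# Hypothetical sketch of the CreditDeduct interface implied by the deleted
# call sites below; the real implementation (token counting, billing
# persistence) is not shown in this diff.
import json


class CreditDeduct:
    def __init__(self, user, model_id: str, body: dict, is_stream: bool):
        self.user = user
        self.model_id = model_id
        self.body = body
        self.is_stream = is_stream
        self.prompt_tokens = 0
        self.completion_tokens = 0

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # Assumption: the real class settles the user's balance here.
        return False

    def run(self, chunk) -> None:
        # Called once per streamed chunk (or once per dict response) to
        # accumulate usage; this counting logic is an assumption.
        self.completion_tokens += 1

    @property
    def usage_message(self) -> str:
        # An extra SSE event appended after the model output, mirroring the
        # OpenAI usage block; the exact payload shape is an assumption.
        usage = {
            "prompt_tokens": self.prompt_tokens,
            "completion_tokens": self.completion_tokens,
            "total_tokens": self.prompt_tokens + self.completion_tokens,
        }
        return f"data: {json.dumps({'usage': usage})}\n\n"

    def add_usage_to_resp(self, resp: dict) -> dict:
        # Non-streaming path: attach the usage block to the JSON response.
        resp["usage"] = {
            "prompt_tokens": self.prompt_tokens,
            "completion_tokens": self.completion_tokens,
            "total_tokens": self.prompt_tokens + self.completion_tokens,
        }
        return resp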
@@ -289,31 +288,12 @@ async def generate_function_chat_completion(
                 # Directly return if the response is a StreamingResponse
                 if isinstance(res, StreamingResponse):
-                    with CreditDeduct(
-                        user=user,
-                        model_id=model_id,
-                        body=form_data,
-                        is_stream=True,
-                    ) as credit_deduct:
-                        async for data in res.body_iterator:
-                            credit_deduct.run(data)
-                            yield data
-                        yield credit_deduct.usage_message
+                    async for data in res.body_iterator:
+                        yield data
                     return
                 if isinstance(res, dict):
-                    with CreditDeduct(
-                        user=user,
-                        model_id=model_id,
-                        body=form_data,
-                        is_stream=False,
-                    ) as credit_deduct:
-                        credit_deduct.run(res)
-                        res = credit_deduct.add_usage_to_resp(res)
-                        yield f"data: {json.dumps(res)}\n\n"
+                    yield f"data: {json.dumps(res)}\n\n"
                     return
 
             except Exception as e:
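Both deleted branches framed their output as OpenAI-style server-sent events: a JSON payload on a "data: " line terminated by a blank line, with "data: [DONE]" as the end-of-stream sentinel. A small illustrative parser for that framing (not part of this codebase):

# Minimal illustration of the SSE framing used above: each event is a
# "data: <json>\n\n" line; "data: [DONE]" terminates the stream.
import json

def parse_sse_line(line: str):
    """Return the decoded JSON payload, or None for [DONE]/non-data lines."""
    if not line.startswith("data: "):
        return None
    payload = line[len("data: "):].strip()
    if payload == "[DONE]":
        return None
    return json.loads(payload)

assert parse_sse_line('data: {"ok": true}\n\n') == {"ok": True}
assert parse_sse_line("data: [DONE]") is None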
@@ -321,41 +301,29 @@ async def generate_function_chat_completion(
                 yield f"data: {json.dumps({'error': {'detail': str(e)}})}\n\n"
                 return
 
-            with CreditDeduct(
-                user=user,
-                model_id=model_id,
-                body=form_data,
-                is_stream=True,
-            ) as credit_deduct:
-                if isinstance(res, str):
-                    message = openai_chat_chunk_message_template(
-                        form_data["model"], res
-                    )
-                    credit_deduct.run(message)
-                    yield f"data: {json.dumps(message)}\n\n"
+            if isinstance(res, str):
+                message = openai_chat_chunk_message_template(
+                    form_data["model"], res
+                )
+                yield f"data: {json.dumps(message)}\n\n"
 
-                if isinstance(res, Iterator):
-                    for line in res:
-                        line = process_line(form_data, line)
-                        credit_deduct.run(line)
-                        yield line
+            if isinstance(res, Iterator):
+                for line in res:
+                    line = process_line(form_data, line)
+                    yield line
 
-                if isinstance(res, AsyncGenerator):
-                    async for line in res:
-                        line = process_line(form_data, line)
-                        credit_deduct.run(line)
-                        yield line
+            if isinstance(res, AsyncGenerator):
+                async for line in res:
+                    line = process_line(form_data, line)
+                    yield line
 
-                if isinstance(res, str) or isinstance(res, Generator):
-                    finish_message = openai_chat_chunk_message_template(
-                        form_data["model"], ""
-                    )
-                    finish_message["choices"][0]["finish_reason"] = "stop"
-                    yield f"data: {json.dumps(finish_message)}\n\n"
-                    yield "data: [DONE]"
-                    yield credit_deduct.usage_message
+            if isinstance(res, str) or isinstance(res, Generator):
+                finish_message = openai_chat_chunk_message_template(
+                    form_data["model"], ""
+                )
+                finish_message["choices"][0]["finish_reason"] = "stop"
+                yield f"data: {json.dumps(finish_message)}\n\n"
+                yield "data: [DONE]"
 
         return StreamingResponse(stream_content(), media_type="text/event-stream")
     else:
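Throughout this hunk, openai_chat_chunk_message_template (from open_webui.utils.misc) builds the streamed chunk objects. The sketch below approximates the OpenAI chat.completion.chunk shape it returns — the id and created values are illustrative assumptions, not the real helper's output — including the finish_reason overwrite the diff performs on the final chunk.

# Approximate shape of openai_chat_chunk_message_template's output; the real
# helper lives in open_webui.utils.misc, and id/created here are illustrative.
import time
import uuid

def chunk_message_template(model: str, content: str) -> dict:
    return {
        "id": f"chatcmpl-{uuid.uuid4()}",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
        "choices": [
            {
                "index": 0,
                "delta": {"content": content} if content else {},
                # The hunk above overwrites this with "stop" on the final chunk.
                "finish_reason": None,
            }
        ],
    }

final = chunk_message_template("my-pipe-model", "")
final["choices"][0]["finish_reason"] = "stop"  # same move as the diff's finish_message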
@@ -367,38 +335,17 @@ async def generate_function_chat_completion(
             return {"error": {"detail": str(e)}}
 
         async def to_stream(response):
-            with CreditDeduct(
-                user=user,
-                model_id=model_id,
-                body=form_data,
-                is_stream=True,
-            ) as credit_deduct:
-                async for data in response.body_iterator:
-                    credit_deduct.run(data)
-                    yield data
-                yield credit_deduct.usage_message
+            async for data in response.body_iterator:
+                yield data
 
         if isinstance(res, StreamingResponse):
             return StreamingResponse(to_stream(res), media_type="text/event-stream")
 
-        with CreditDeduct(
-            user=user,
-            model_id=model_id,
-            body=form_data,
-            is_stream=False,
-        ) as credit_deduct:
-            if isinstance(res, dict):
-                credit_deduct.run(res)
-                return credit_deduct.add_usage_to_resp(res)
-            if isinstance(res, BaseModel):
-                res = res.model_dump()
-                credit_deduct.run(res)
-                return credit_deduct.add_usage_to_resp(res)
-            message = await get_message_content(res)
-            res = openai_chat_completion_message_template(form_data["model"], message)
-            credit_deduct.run(res)
-            return credit_deduct.add_usage_to_resp(res)
+        if isinstance(res, dict):
+            return res
+        if isinstance(res, BaseModel):
+            return res.model_dump()
+
+        message = await get_message_content(res)
+        return openai_chat_completion_message_template(form_data["model"], message)
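With the credit bookkeeping removed, to_stream is a pure passthrough over an inner StreamingResponse. A self-contained sketch of that re-wrapping pattern, using Starlette's real StreamingResponse; fake_upstream is an illustrative stand-in for the pipe's response:

# Self-contained sketch of the passthrough-streaming pattern kept by this
# commit: re-wrap an existing StreamingResponse's body_iterator in a new
# StreamingResponse. fake_upstream is an illustrative stand-in.
import asyncio
from starlette.responses import StreamingResponse

async def fake_upstream():
    for i in range(3):
        yield f"data: chunk-{i}\n\n".encode()

upstream = StreamingResponse(fake_upstream(), media_type="text/event-stream")

async def to_stream(response: StreamingResponse):
    # Identical to the simplified to_stream above: forward every chunk as-is.
    async for data in response.body_iterator:
        yield data

wrapped = StreamingResponse(to_stream(upstream), media_type="text/event-stream")

async def main():
    async for chunk in wrapped.body_iterator:
        print(chunk)

asyncio.run(main())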