feat: streamline

2026-01-16 18:34:38 +08:00
parent 16263710d9
commit 11fcec9387
137 changed files with 68993 additions and 6435 deletions


@@ -27,7 +27,6 @@ from open_webui.models.files import Files
from open_webui.models.knowledge import Knowledges
from open_webui.models.chats import Chats
from open_webui.models.notes import Notes
from open_webui.retrieval.vector.main import GetResult
from open_webui.utils.access_control import has_access
@@ -46,8 +45,6 @@ from open_webui.config import (
RAG_EMBEDDING_CONTENT_PREFIX,
RAG_EMBEDDING_PREFIX_FIELD_NAME,
)
from open_webui.utils.credit.usage import CreditDeduct
from open_webui.utils.credit.utils import check_credit_by_user_id
log = logging.getLogger(__name__)
@@ -540,10 +537,6 @@ async def agenerate_openai_batch_embeddings(
prefix: str = None,
user: UserModel = None,
) -> Optional[list[list[float]]]:
# check credit
if user:
check_credit_by_user_id(user_id=user.id, form_data={}, is_embedding=True)
try:
log.debug(
f"agenerate_openai_batch_embeddings:model {model} batch size: {len(texts)}"
@@ -566,26 +559,6 @@ async def agenerate_openai_batch_embeddings(
r.raise_for_status()
data = await r.json()
if user:
with CreditDeduct(
user=user,
model_id=model,
body={
"messages": [
{"role": "user", "content": form_data["input"]}
]
},
is_stream=False,
is_embedding=True,
) as credit_deduct:
if "usage" in data:
credit_deduct.is_official_usage = True
prompt_tokens = data["usage"]["prompt_tokens"]
credit_deduct.usage.prompt_tokens = prompt_tokens
credit_deduct.usage.total_tokens = prompt_tokens
else:
credit_deduct.run(form_data["input"])
if "data" in data:
return [item["embedding"] for item in data["data"]]
else:
@@ -604,10 +577,6 @@ async def agenerate_azure_openai_batch_embeddings(
prefix: str = None,
user: UserModel = None,
) -> Optional[list[list[float]]]:
# check credit
if user:
check_credit_by_user_id(user_id=user.id, form_data={}, is_embedding=True)
try:
log.debug(
f"agenerate_azure_openai_batch_embeddings:deployment {model} batch size: {len(texts)}"
@@ -630,22 +599,6 @@ async def agenerate_azure_openai_batch_embeddings(
r.raise_for_status()
data = await r.json()
input_text = str(form_data["input"])
with CreditDeduct(
user=user,
model_id=model,
body={"messages": [{"role": "user", "content": input_text}]},
is_stream=False,
is_embedding=True,
) as credit_deduct:
if "usage" in data:
credit_deduct.is_official_usage = True
prompt_tokens = data["usage"]["prompt_tokens"]
credit_deduct.usage.prompt_tokens = prompt_tokens
credit_deduct.usage.total_tokens = prompt_tokens
else:
credit_deduct.run(input_text)
if "data" in data:
return [item["embedding"] for item in data["data"]]
else:
@@ -663,10 +616,6 @@ async def agenerate_ollama_batch_embeddings(
prefix: str = None,
user: UserModel = None,
) -> Optional[list[list[float]]]:
# check credit
if user:
check_credit_by_user_id(user_id=user.id, form_data={}, is_embedding=True)
try:
log.debug(
f"agenerate_ollama_batch_embeddings:model {model} batch size: {len(texts)}"
@@ -689,16 +638,6 @@ async def agenerate_ollama_batch_embeddings(
r.raise_for_status()
data = await r.json()
input_text = str(form_data["input"])
with CreditDeduct(
user=user,
model_id=model,
body={"messages": [{"role": "user", "content": input_text}]},
is_stream=False,
is_embedding=True,
) as credit_deduct:
credit_deduct.run(input_text)
if "embeddings" in data:
return data["embeddings"]
else:
@@ -912,21 +851,6 @@ async def get_sources_from_items(
],
}
elif item.get("type") == "note":
# Note Attached
note = Notes.get_note_by_id(item.get("id"))
if note and (
user.role == "admin"
or note.user_id == user.id
or has_access(user.id, "read", note.access_control)
):
# User has access to the note
query_result = {
"documents": [[note.data.get("content", {}).get("md", "")]],
"metadatas": [[{"file_id": note.id, "name": note.title}]],
}
elif item.get("type") == "chat":
# Chat Attached
chat = Chats.get_chat_by_id(item.get("id"))
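
For context, the embedding helpers above all drop the same two-phase credit pattern: a pre-flight check before calling the provider, and a post-hoc deduction once the response is back. Below is a minimal consolidated sketch of that pattern, reconstructed only from the call sites visible in this diff; the embed_with_credit helper and its call_embedding_api parameter are hypothetical, and CreditDeduct's attributes are assumed from how the deleted lines use them, not from its implementation.

    # Sketch only: names outside the diff (embed_with_credit, call_embedding_api)
    # are illustrative, not part of the codebase.
    from typing import Optional

    from open_webui.models.users import UserModel
    from open_webui.utils.credit.usage import CreditDeduct
    from open_webui.utils.credit.utils import check_credit_by_user_id


    async def embed_with_credit(
        user: Optional[UserModel],
        model: str,
        texts: list[str],
        call_embedding_api,  # hypothetical async callable returning the raw API response
    ) -> Optional[list[list[float]]]:
        # Phase 1: pre-flight check, refuse before hitting the embedding API.
        if user:
            check_credit_by_user_id(user_id=user.id, form_data={}, is_embedding=True)

        data = await call_embedding_api(model, texts)

        # Phase 2: deduction. Prefer the provider-reported usage when present,
        # otherwise let CreditDeduct estimate tokens from the input text.
        if user:
            with CreditDeduct(
                user=user,
                model_id=model,
                body={"messages": [{"role": "user", "content": texts}]},
                is_stream=False,
                is_embedding=True,
            ) as credit_deduct:
                if "usage" in data:
                    credit_deduct.is_official_usage = True
                    prompt_tokens = data["usage"]["prompt_tokens"]
                    credit_deduct.usage.prompt_tokens = prompt_tokens
                    credit_deduct.usage.total_tokens = prompt_tokens
                else:
                    credit_deduct.run(texts)

        return [item["embedding"] for item in data.get("data", [])]

With this commit, the helpers keep only the API call and the response parsing; both credit phases are gone, along with the credit imports at the top of the file.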