
Commit

refactor: remove unused Valves component and related code from chat controls
notguoxin committed Jan 7, 2025
1 parent ac0b4b0 commit 005c86d
Showing 6 changed files with 2 additions and 652 deletions.
backend/open_webui/routers/memories.py (87 changes: 2 additions & 85 deletions)
@@ -4,7 +4,6 @@
 from typing import Optional
 
 from open_webui.models.memories import Memories, MemoryModel
-from open_webui.retrieval.vector.connector import VECTOR_DB_CLIENT
 from open_webui.utils.auth import get_verified_user
 from open_webui.env import SRC_LOG_LEVELS

@@ -49,21 +48,7 @@ async def add_memory(
     form_data: AddMemoryForm,
     user=Depends(get_verified_user),
 ):
-    memory = Memories.insert_new_memory(user.id, form_data.content)
-
-    VECTOR_DB_CLIENT.upsert(
-        collection_name=f"user-memory-{user.id}",
-        items=[
-            {
-                "id": memory.id,
-                "text": memory.content,
-                "vector": request.app.state.EMBEDDING_FUNCTION(memory.content),
-                "metadata": {"created_at": memory.created_at},
-            }
-        ],
-    )
-
-    return memory
+    return ""
 
 
 ############################
@@ -80,13 +65,7 @@ class QueryMemoryForm(BaseModel):
 async def query_memory(
     request: Request, form_data: QueryMemoryForm, user=Depends(get_verified_user)
 ):
-    results = VECTOR_DB_CLIENT.search(
-        collection_name=f"user-memory-{user.id}",
-        vectors=[request.app.state.EMBEDDING_FUNCTION(form_data.content)],
-        limit=form_data.k,
-    )
-
-    return results
+    return ""
 
 
 ############################
@@ -96,25 +75,6 @@ async def query_memory(
 async def reset_memory_from_vector_db(
     request: Request, user=Depends(get_verified_user)
 ):
-    VECTOR_DB_CLIENT.delete_collection(f"user-memory-{user.id}")
-
-    memories = Memories.get_memories_by_user_id(user.id)
-    VECTOR_DB_CLIENT.upsert(
-        collection_name=f"user-memory-{user.id}",
-        items=[
-            {
-                "id": memory.id,
-                "text": memory.content,
-                "vector": request.app.state.EMBEDDING_FUNCTION(memory.content),
-                "metadata": {
-                    "created_at": memory.created_at,
-                    "updated_at": memory.updated_at,
-                },
-            }
-            for memory in memories
-        ],
-    )
-
     return True
 
 
@@ -125,15 +85,6 @@ async def reset_memory_from_vector_db(
 
 @router.delete("/delete/user", response_model=bool)
 async def delete_memory_by_user_id(user=Depends(get_verified_user)):
-    result = Memories.delete_memories_by_user_id(user.id)
-
-    if result:
-        try:
-            VECTOR_DB_CLIENT.delete_collection(f"user-memory-{user.id}")
-        except Exception as e:
-            log.error(e)
-        return True
-
     return False
 
 
@@ -153,38 +104,4 @@ async def update_memory_by_id(
     if memory is None:
         raise HTTPException(status_code=404, detail="Memory not found")
 
-    if form_data.content is not None:
-        VECTOR_DB_CLIENT.upsert(
-            collection_name=f"user-memory-{user.id}",
-            items=[
-                {
-                    "id": memory.id,
-                    "text": memory.content,
-                    "vector": request.app.state.EMBEDDING_FUNCTION(memory.content),
-                    "metadata": {
-                        "created_at": memory.created_at,
-                        "updated_at": memory.updated_at,
-                    },
-                }
-            ],
-        )
-
     return memory
-
-
-############################
-# DeleteMemoryById
-############################
-
-
-@router.delete("/{memory_id}", response_model=bool)
-async def delete_memory_by_id(memory_id: str, user=Depends(get_verified_user)):
-    result = Memories.delete_memory_by_id_and_user_id(memory_id, user.id)
-
-    if result:
-        VECTOR_DB_CLIENT.delete(
-            collection_name=f"user-memory-{user.id}", ids=[memory_id]
-        )
-        return True
-
-    return False
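
Taken together, the hunks above strip every vector-database call out of the memories router: add_memory and query_memory now return an empty string, reset_memory_from_vector_db and delete_memory_by_user_id are reduced to bare return statements, and the DeleteMemoryById route is removed entirely. A rough sketch of the resulting add_memory follows; the route decorator and the AddMemoryForm model sit outside the displayed hunks, so both are assumptions based on upstream open-webui.

# Hedged sketch of add_memory after this commit. Only the signature and the
# bare `return ""` body are confirmed by the diff; the decorator path and the
# AddMemoryForm shape are assumptions taken from upstream open-webui.
from fastapi import APIRouter, Depends, Request
from pydantic import BaseModel

from open_webui.utils.auth import get_verified_user

router = APIRouter()


class AddMemoryForm(BaseModel):
    content: str  # assumed field; the real model is defined elsewhere in memories.py


@router.post("/add")  # assumed route; the decorator is not part of the hunk
async def add_memory(
    request: Request,
    form_data: AddMemoryForm,
    user=Depends(get_verified_user),
):
    # No Memories.insert_new_memory call and no VECTOR_DB_CLIENT.upsert remain;
    # the endpoint simply acknowledges the request with an empty string.
    return ""
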
backend/open_webui/utils/middleware.py (47 changes: 0 additions & 47 deletions)
@@ -37,9 +37,6 @@
 from open_webui.models.functions import Functions
 from open_webui.models.models import Models
 
-from open_webui.retrieval.utils import get_sources_from_files
-
-
 from open_webui.utils.chat import generate_chat_completion
 from open_webui.utils.task import (
     get_task_model_id,
@@ -345,50 +342,6 @@ async def chat_completion_files_handler(
     request: Request, body: dict, user: UserModel
 ) -> tuple[dict, dict[str, list]]:
     sources = []
-
-    if files := body.get("metadata", {}).get("files", None):
-        try:
-            queries_response = await generate_queries(
-                request,
-                {
-                    "model": body["model"],
-                    "messages": body["messages"],
-                    "type": "retrieval",
-                },
-                user,
-            )
-            queries_response = queries_response["choices"][0]["message"]["content"]
-
-            try:
-                bracket_start = queries_response.find("{")
-                bracket_end = queries_response.rfind("}") + 1
-
-                if bracket_start == -1 or bracket_end == -1:
-                    raise Exception("No JSON object found in the response")
-
-                queries_response = queries_response[bracket_start:bracket_end]
-                queries_response = json.loads(queries_response)
-            except Exception as e:
-                queries_response = {"queries": [queries_response]}
-
-            queries = queries_response.get("queries", [])
-        except Exception as e:
-            queries = []
-
-        if len(queries) == 0:
-            queries = [get_last_user_message(body["messages"])]
-
-        sources = get_sources_from_files(
-            files=files,
-            queries=queries,
-            embedding_function=request.app.state.EMBEDDING_FUNCTION,
-            k=request.app.state.config.TOP_K,
-            reranking_function=request.app.state.rf,
-            r=request.app.state.config.RELEVANCE_THRESHOLD,
-            hybrid_search=request.app.state.config.ENABLE_RAG_HYBRID_SEARCH,
-        )
-
-    log.debug(f"rag_contexts:sources: {sources}")
     return body, {"sources": sources}
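
With the file-retrieval block gone, the unchanged context lines above pin down the whole remaining body of chat_completion_files_handler. A minimal sketch of the post-commit function follows, assuming the Request and UserModel imports from the surrounding file, which are outside this hunk.

# Hedged reconstruction of chat_completion_files_handler after this commit,
# based only on the unchanged context lines in the hunk above. The import
# locations are assumptions; they are not part of the displayed diff.
from fastapi import Request

from open_webui.models.users import UserModel


async def chat_completion_files_handler(
    request: Request, body: dict, user: UserModel
) -> tuple[dict, dict[str, list]]:
    # The RAG query generation and get_sources_from_files call were removed,
    # so no sources are ever collected for uploaded files.
    sources = []
    return body, {"sources": sources}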


Diffs for the remaining 4 changed files are not shown.
