Skip to content

feat: add persistency component to ChatInterface and cleanup message/conversation ids #556

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 29 commits into from
May 27, 2025
Merged
Show file tree
Hide file tree
Changes from 27 commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions examples/api/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@

from ragbits.chat.interface import ChatInterface
from ragbits.chat.interface.forms import FeedbackConfig, FeedbackForm, FormField
from ragbits.chat.interface.types import ChatResponse, Message
from ragbits.chat.interface.types import ChatContext, ChatResponse, Message
from ragbits.core.llms import LiteLLM


Expand Down Expand Up @@ -50,7 +50,7 @@ async def chat(
self,
message: str,
history: list[Message] | None = None,
context: dict | None = None,
context: ChatContext | None = None,
) -> AsyncGenerator[ChatResponse, None]:
"""
Example implementation of the ChatInterface.
Expand Down
88 changes: 88 additions & 0 deletions examples/api/offline_chat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "ragbits-chat",
# ]
# ///
#
# To run this example, execute the following CLI command:
# ragbits api run examples.api.offline_chat:MyChat

import asyncio
from collections.abc import AsyncGenerator

from ragbits.chat.interface import ChatInterface
from ragbits.chat.interface.forms import FeedbackConfig, FeedbackForm, FormField
from ragbits.chat.interface.types import ChatContext, ChatResponse, Message
from ragbits.chat.persistence.file import FileHistoryPersistence


class MyChat(ChatInterface):
    """An offline example implementation of the ChatInterface that demonstrates different response types."""

    # Persist every chat interaction to disk under ./chat_history.
    history_persistence = FileHistoryPersistence(base_path="chat_history")

    # Enable thumbs-up/thumbs-down feedback, each backed by a small form.
    feedback_config = FeedbackConfig(
        like_enabled=True,
        dislike_enabled=True,
        like_form=FeedbackForm(
            title="Like Form",
            fields=[
                FormField(name="like_reason", type="text", required=True, label="Why do you like this?"),
            ],
        ),
        dislike_form=FeedbackForm(
            title="Dislike Form",
            fields=[
                FormField(
                    name="issue_type",
                    type="select",
                    required=True,
                    label="What was the issue?",
                    options=["Incorrect information", "Not helpful", "Unclear", "Other"],
                ),
                FormField(name="feedback", type="text", required=True, label="Please provide more details"),
            ],
        ),
    )

    @staticmethod
    async def _generate_response(message: str) -> AsyncGenerator[str, None]:
        """Yield a canned reply one character at a time to mimic a streaming LLM."""
        reply = f"I received your message: '{message}'. This is an offline response."
        for token in reply:
            yield token
            # Small pause between characters so the client sees a live stream.
            await asyncio.sleep(0.05)

    async def chat(
        self,
        message: str,
        history: list[Message] | None = None,
        context: ChatContext | None = None,
    ) -> AsyncGenerator[ChatResponse, None]:
        """
        Offline implementation of the ChatInterface.

        Args:
            message: The current user message
            history: Optional list of previous messages in the conversation
            context: Optional context

        Yields:
            ChatResponse objects containing different types of content:
            - Text chunks for the actual response
            - Reference documents used to generate the response
        """
        # First emit an example reference document.
        yield self.create_reference(
            title="Offline Reference",
            content="This is an example reference document that might be relevant to your query.",
            url="https://example.com/offline-reference",
        )

        # Then stream the simulated answer, piece by piece.
        async for piece in self._generate_response(message):
            yield self.create_text_response(piece)
2 changes: 2 additions & 0 deletions packages/ragbits-chat/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

## Unreleased

- Add persistence component to save chat interactions from ragbits-chat (#556)
- Add conversation_id parameter to chat interface context (#556)
- Add uvicorn to dependencies (#578)
- Remove HeroUI Pro components (#557)

Expand Down
60 changes: 17 additions & 43 deletions packages/ragbits-chat/src/ragbits/chat/api.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import importlib
import json
import logging
import uuid
from collections.abc import AsyncGenerator
from pathlib import Path
from typing import Any, Literal
Expand All @@ -15,7 +14,7 @@
from pydantic import BaseModel, Field

from ragbits.chat.interface import ChatInterface
from ragbits.chat.interface.types import ChatResponse, Message
from ragbits.chat.interface.types import ChatContext, ChatResponse, Message

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -112,33 +111,31 @@ async def chat_message(request: ChatMessageRequest) -> StreamingResponse:
if not self.chat_interface:
raise HTTPException(status_code=500, detail="Chat implementation is not initialized")

# Generate a unique message ID for this conversation message
message_id = str(uuid.uuid4())
# Convert request context to ChatContext
chat_context = ChatContext(**request.context)

# Verify state signature if provided
if "state" in request.context and "signature" in request.context:
state = request.context["state"]
signature = request.context["signature"]
if not ChatInterface.verify_state(state, signature):
logger.warning(f"Invalid state signature received for message {message_id}")
logger.warning(f"Invalid state signature received for message {chat_context.message_id}")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid state signature",
)
# Remove the signature from context after verification
del request.context["signature"]
# Ensure context has a state field if not present
elif "state" not in request.context:
request.context["state"] = {}
# Remove the signature from context after verification (it's already parsed into ChatContext)

# Get the response generator from the chat interface
response_generator = self.chat_interface.chat(
message=request.message, history=[msg.model_dump() for msg in request.history], context=request.context
message=request.message,
history=[msg.model_dump() for msg in request.history],
context=chat_context,
)

# Pass the generator to the SSE formatter
return StreamingResponse(
RagbitsAPI._chat_response_to_sse(response_generator, message_id, self.chat_interface),
RagbitsAPI._chat_response_to_sse(response_generator),
media_type="text/event-stream",
)

Expand Down Expand Up @@ -179,46 +176,23 @@ async def config() -> JSONResponse:

@staticmethod
async def _chat_response_to_sse(
responses: AsyncGenerator[ChatResponse], message_id: str, chat_interface: ChatInterface | None = None
responses: AsyncGenerator[ChatResponse],
) -> AsyncGenerator[str, None]:
"""
Formats chat responses into Server-Sent Events (SSE) format for streaming to the client.
Each response is converted to JSON and wrapped in the SSE 'data:' prefix.

Args:
responses: The chat response generator
message_id: The unique identifier for this message
chat_interface: The chat interface instance to use for verifying state (optional)
"""
# Send the message_id as the first SSE event
data = json.dumps({"type": "message_id", "content": message_id})
yield f"data: {data}\n\n"

async for response in responses:
if response.type.value == "state_update":
state_update = response.as_state_update()
if state_update:
# Verification is already done by the chat interface that created the state update
data = json.dumps(
{
"type": "state_update",
"content": {
"state": state_update.state,
"signature": state_update.signature,
},
}
)
yield f"data: {data}\n\n"
else:
data = json.dumps(
{
"type": response.type.value,
"content": response.content
if isinstance(response.content, str)
else response.content.model_dump(),
}
)
yield f"data: {data}\n\n"
data = json.dumps(
{
"type": response.type.value,
"content": response.content if isinstance(response.content, str) else response.content.model_dump(),
}
)
yield f"data: {data}\n\n"

@staticmethod
def _load_chat_interface(implementation: type[ChatInterface] | str) -> ChatInterface:
Expand Down

This file was deleted.

56 changes: 0 additions & 56 deletions packages/ragbits-chat/src/ragbits/chat/history/stores/base.py

This file was deleted.

Loading