feat: chat list api
hyacinthus committed Feb 26, 2025
1 parent 290cb08 commit 00eeb7d
Showing 5 changed files with 236 additions and 16 deletions.
129 changes: 129 additions & 0 deletions app/entrypoints/web.py
@@ -18,6 +18,7 @@
)
from fastapi.responses import PlainTextResponse
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from pydantic import BaseModel, Field
from sqlmodel import desc, select
from sqlmodel.ext.asyncio.session import AsyncSession

@@ -27,6 +28,7 @@
from models.agent import Agent, AgentData, AgentQuota
from models.chat import (
AuthorType,
Chat,
ChatMessage,
ChatMessageRequest,
)
@@ -617,3 +619,130 @@ async def create_chat(
if isinstance(e, HTTPException):
raise
raise HTTPException(status_code=500, detail=str(e))


@chat_router_readonly.get(
"/agents/{aid}/chats",
response_model=List[Chat],
summary="Get chat list by agent and user",
tags=["Chat"],
)
async def get_agent_chats(
aid: str = Path(..., description="Agent ID"),
user_id: str = Query(..., description="User ID"),
):
"""Get chat list for a specific agent and user.
**Parameters:**
* `aid` - Agent ID
* `user_id` - User ID
**Returns:**
* `List[Chat]` - List of chats for the specified agent and user
**Raises:**
* `404` - Agent not found
"""
# Verify agent exists
agent = await Agent.get(aid)
if not agent:
raise HTTPException(status_code=404, detail="Agent not found")

# Get chats by agent and user
chats = await Chat.get_by_agent_user(aid, user_id)
return chats
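
A minimal client sketch for this endpoint, assuming the service is reachable at http://localhost:8000, the routers are mounted without a prefix, and any authentication on the readonly router is ignored; the agent and user IDs are illustrative:

import httpx

# List chats for one agent/user pair; user_id is a required query parameter.
resp = httpx.get(
    "http://localhost:8000/agents/my-agent/chats",
    params={"user_id": "user-123"},
)
resp.raise_for_status()
for chat in resp.json():
    print(chat["id"], chat["rounds"], chat["summary"])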


class ChatSummaryUpdate(BaseModel):
"""Request model for updating chat summary."""

summary: str = Field(
...,
description="New summary text for the chat",
examples=["User asked about product features and pricing"],
min_length=1,
)


@chat_router.put(
"/agents/{aid}/chats/{chat_id}",
response_model=Chat,
summary="Update chat summary",
tags=["Chat"],
)
async def update_chat_summary(
update_data: ChatSummaryUpdate,
aid: str = Path(..., description="Agent ID"),
chat_id: str = Path(..., description="Chat ID"),
):
"""Update the summary of a specific chat.
**Parameters:**
* `aid` - Agent ID
* `chat_id` - Chat ID
* `update_data` - Summary update data (in request body)
**Returns:**
* `Chat` - Updated chat object
**Raises:**
* `404` - Agent or chat not found
"""
# Verify agent exists
agent = await Agent.get(aid)
if not agent:
raise HTTPException(status_code=404, detail="Agent not found")

# Get chat
chat = await Chat.get(chat_id)
if not chat:
raise HTTPException(status_code=404, detail="Chat not found")

# Verify chat belongs to agent
if chat.agent_id != aid:
raise HTTPException(status_code=404, detail="Chat not found for this agent")

# Update summary
updated_chat = await chat.update_summary(update_data.summary)
return updated_chat
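
A matching sketch for the summary update, under the same local-server assumption; the JSON body corresponds to ChatSummaryUpdate and the response is the updated Chat:

import httpx

# PUT a new summary for an existing chat (IDs are illustrative).
resp = httpx.put(
    "http://localhost:8000/agents/my-agent/chats/chat-456",
    json={"summary": "User asked about product features and pricing"},
)
resp.raise_for_status()
print(resp.json()["summary"])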


@chat_router.delete(
"/agents/{aid}/chats/{chat_id}",
status_code=status.HTTP_204_NO_CONTENT,
summary="Delete a chat",
tags=["Chat"],
)
async def delete_chat(
aid: str = Path(..., description="Agent ID"),
chat_id: str = Path(..., description="Chat ID"),
):
"""Delete a specific chat.
**Parameters:**
* `aid` - Agent ID
* `chat_id` - Chat ID
**Returns:**
* `204 No Content` - Success
**Raises:**
* `404` - Agent or chat not found
"""
# Verify agent exists
agent = await Agent.get(aid)
if not agent:
raise HTTPException(status_code=404, detail="Agent not found")

# Get chat
chat = await Chat.get(chat_id)
if not chat:
raise HTTPException(status_code=404, detail="Chat not found")

# Verify chat belongs to agent
if chat.agent_id != aid:
raise HTTPException(status_code=404, detail="Chat not found for this agent")

# Delete chat
await chat.delete()
return Response(status_code=status.HTTP_204_NO_CONTENT)
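
And a deletion sketch under the same assumptions; a successful call returns 204 with no body, and repeating it yields 404 once the chat is gone:

import httpx

resp = httpx.delete("http://localhost:8000/agents/my-agent/chats/chat-456")
assert resp.status_code == 204
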
103 changes: 102 additions & 1 deletion models/chat.py
@@ -2,11 +2,12 @@
from enum import Enum
from typing import List, NotRequired, Optional, TypedDict

from fastapi import HTTPException
from pydantic import BaseModel, Field
from sqlalchemy import Column, DateTime, Index, String, func
from sqlalchemy.dialects.postgresql import JSONB
from sqlmodel import Field as SQLModelField
from sqlmodel import SQLModel
from sqlmodel import SQLModel, desc, select, update

from models.db import get_session

@@ -190,3 +191,103 @@ async def save(self):
db.add(self)
await db.commit()
await db.refresh(self)


class Chat(SQLModel, table=True):
"""Chat model."""

__tablename__ = "chats"
__table_args__ = (Index("ix_chats_agent_user", "agent_id", "user_id"),)

id: str = SQLModelField(
primary_key=True,
description="Unique identifier for the chat",
)
agent_id: str = SQLModelField(
description="ID of the agent this chat belongs to",
)
user_id: str = SQLModelField(
description="User ID of the chat",
)
summary: str = SQLModelField(
default="",
description="Summary of the chat",
)
rounds: int = SQLModelField(
default=0,
description="Number of rounds in the chat",
)
created_at: datetime = SQLModelField(
default_factory=lambda: datetime.now(timezone.utc),
sa_type=DateTime(timezone=True),
sa_column_kwargs={"server_default": func.now()},
nullable=False,
description="Timestamp when this chat was created",
)
updated_at: datetime = SQLModelField(
default_factory=lambda: datetime.now(timezone.utc),
sa_type=DateTime(timezone=True),
sa_column_kwargs={
"onupdate": lambda: datetime.now(timezone.utc),
},
nullable=False,
description="Timestamp when this chat was updated",
)

@classmethod
async def get(cls, id: str) -> "Chat":
async with get_session() as db:
chat = await db.get(cls, id)
if not chat:
raise HTTPException(status_code=404, detail="Chat not found")
return chat

async def create(self):
async with get_session() as db:
db.add(self)
await db.commit()
await db.refresh(self)

async def delete(self):
async with get_session() as db:
await db.delete(self)
await db.commit()

async def add_round(self):
"""Increment the number of rounds in the chat on the database server.
Uses a direct SQL UPDATE statement to increment the rounds counter
on the server side, avoiding potential race conditions.
"""
async with get_session() as db:
stmt = update(Chat).where(Chat.id == self.id).values(rounds=Chat.rounds + 1)
await db.exec(stmt)
await db.commit()

async def update_summary(self, summary: str) -> "Chat":
"""Update the chat summary in the database.
Uses a direct SQL UPDATE statement to set the summary field.
Args:
summary: New summary text for the chat
"""
async with get_session() as db:
stmt = update(Chat).where(Chat.id == self.id).values(summary=summary)
await db.exec(stmt)
await db.commit()
# Refresh the local object to reflect the database change
self.summary = summary
return self

@classmethod
async def get_by_agent_user(cls, agent_id: str, user_id: str) -> List["Chat"]:
async with get_session() as db:
return (
await db.exec(
select(cls)
.order_by(desc(cls.updated_at))
.limit(10)
.where(cls.agent_id == agent_id, cls.user_id == user_id)
)
).all()
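
The add_round method above pushes the increment into a single server-side UPDATE (rounds = rounds + 1), so two concurrent requests cannot overwrite each other the way a Python read-modify-write could, and get_by_agent_user returns at most the 10 most recently updated chats, newest first. A small usage sketch of the new model, assuming a configured database and an asyncio entry point; IDs are illustrative:

import asyncio

from models.chat import Chat

async def demo() -> None:
    chat = Chat(id="chat-456", agent_id="my-agent", user_id="user-123")
    await chat.create()                      # INSERT
    await chat.add_round()                   # server-side rounds = rounds + 1
    await chat.update_summary("First contact with user-123")
    recent = await Chat.get_by_agent_user("my-agent", "user-123")
    print([c.id for c in recent])            # newest-first, at most 10 chats

asyncio.run(demo())
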
12 changes: 3 additions & 9 deletions skills/elfa/mention.py
@@ -114,8 +114,7 @@ class ElfaGetMentions(ElfaBaseTool):
"""

name: str = "elfa_get_mentions"
description: str = (
"""This tool uses the Elfa AI API to query hourly-updated tweets from "smart accounts" – accounts identified as influential or relevant – that have received at least 10 interactions (comments, retweets, quote tweets).
description: str = """This tool uses the Elfa AI API to query hourly-updated tweets from "smart accounts" – accounts identified as influential or relevant – that have received at least 10 interactions (comments, retweets, quote tweets).
This tool is useful for:
@@ -125,7 +124,6 @@ class ElfaGetMentions(ElfaBaseTool):
* **Reputation Management:** Identify and address potential PR issues.
The data returned includes the tweet content, timestamp, and potentially other relevant metadata."""
)
args_schema: Type[BaseModel] = ElfaGetMentionsInput

def _run(self) -> ElfaGetMentionsOutput:
@@ -255,8 +253,7 @@ class ElfaGetTopMentions(ElfaBaseTool):
"""

name: str = "elfa_get_top_mentions"
description: str = (
"""This tool uses the Elfa API to query tweets mentioning a specific stock ticker. The tweets are ranked by view count, providing insight into the most visible and potentially influential discussions surrounding the stock. The results are updated hourly, allowing for real-time monitoring of market sentiment.
description: str = """This tool uses the Elfa API to query tweets mentioning a specific stock ticker. The tweets are ranked by view count, providing insight into the most visible and potentially influential discussions surrounding the stock. The results are updated hourly, allowing for real-time monitoring of market sentiment.
This tool is useful for:
@@ -265,7 +262,6 @@ class ElfaGetTopMentions(ElfaBaseTool):
* **Investor Insights:** Monitor the conversations and opinions of investors and traders.
To use this tool, simply provide the stock ticker symbol (e.g., "AAPL", "TSLA"). The tool will return a list of tweets, ranked by view count."""
)
args_schema: Type[BaseModel] = ElfaGetTopMentionsInput

def _run(
@@ -409,8 +405,7 @@ class ElfaSearchMentions(ElfaBaseTool):
"""

name: str = "elfa_search_mentions"
description: str = (
"""This tool uses the Elfa API to search tweets mentioning up to five keywords. It can search within the past 30 days of data, which is updated every 5 minutes, or access up to six months of historical tweet data.
description: str = """This tool uses the Elfa API to search tweets mentioning up to five keywords. It can search within the past 30 days of data, which is updated every 5 minutes, or access up to six months of historical tweet data.
This tool is useful for:
@@ -420,7 +415,6 @@ class ElfaSearchMentions(ElfaBaseTool):
* **Competitive Analysis:** See what people are saying about your competitors.
To use this tool, provide up to five keywords. You can also specify whether you want to search recent or historical tweets."""
)
args_schema: Type[BaseModel] = ElfaSearchMentionsInput

def _run(
4 changes: 1 addition & 3 deletions skills/elfa/stats.py
@@ -51,8 +51,7 @@ class ElfaGetSmartStats(ElfaBaseTool):
"""

name: str = "elfa_get_smart_stats"
description: str = (
"""This tool uses the Elfa API to retrieve key social media metrics for a given username. These metrics include:
description: str = """This tool uses the Elfa API to retrieve key social media metrics for a given username. These metrics include:
* **Smart Following Count:** A metric representing the number of high-quality or influential followers.
* **Engagement Score:** A composite score reflecting the level of interaction with the user's content.
@@ -64,7 +63,6 @@ class ElfaGetSmartStats(ElfaBaseTool):
* **Influencer Identification:** Identify influential users in your niche.
* **Social Media Audits:** Assess the overall health and effectiveness of a social media presence.
"""
)
args_schema: Type[BaseModel] = ElfaGetSmartStatsInput

def _run(self, username: str) -> ElfaGetSmartStatsOutput:
4 changes: 1 addition & 3 deletions skills/elfa/tokens.py
@@ -53,8 +53,7 @@ class ElfaGetTrendingTokens(ElfaBaseTool):
"""

name: str = "elfa_get_trending_tokens"
description: str = (
"""This tool ranks the most discussed tokens based on smart mentions count for a given period, with updates every 5 minutes via the Elfa API. Smart mentions provide a more sophisticated measure of discussion volume than simple keyword counts.
description: str = """This tool ranks the most discussed tokens based on smart mentions count for a given period, with updates every 5 minutes via the Elfa API. Smart mentions provide a more sophisticated measure of discussion volume than simple keyword counts.
**Use Cases:**
@@ -65,7 +64,6 @@ class ElfaGetTrendingTokens(ElfaBaseTool):
**Example Usage:**
To use this tool, you would typically specify a time window (e.g., the last hour, the last 24 hours). The tool will then return a ranked list of tokens, along with their corresponding smart mention counts."""
)
args_schema: Type[BaseModel] = ElfaGetTrendingTokensInput

def _run(
