feat: Improve FastAPI agentic template #447

Merged · 10 commits · Nov 26, 2024
Changes from 2 commits
18 changes: 12 additions & 6 deletions helpers/env-variables.ts
@@ -13,6 +13,12 @@ import {

import { TSYSTEMS_LLMHUB_API_URL } from "./providers/llmhub";

+ const DEFAULT_SYSTEM_PROMPT =
+   "You are a helpful assistant who helps users with their questions.";
+
+ const DATA_SOURCES_PROMPT =
+   "You have access to a knowledge base, use query engine tool to retrieve the facts to answer the user question.";
+
export type EnvVar = {
name?: string;
description?: string;
@@ -449,9 +455,6 @@ const getSystemPromptEnv = (
dataSources?: TemplateDataSource[],
template?: TemplateType,
): EnvVar[] => {
- const defaultSystemPrompt =
-   "You are a helpful assistant who helps users with their questions.";
-
const systemPromptEnv: EnvVar[] = [];
// build tool system prompt by merging all tool system prompts
// multiagent template doesn't need system prompt
@@ -466,9 +469,12 @@
}
});

- const systemPrompt = toolSystemPrompt
-   ? `\"${toolSystemPrompt}\"`
-   : defaultSystemPrompt;
+ const systemPrompt =
+   "'" +
+   DEFAULT_SYSTEM_PROMPT +
+   (dataSources?.length ? `\n${DATA_SOURCES_PROMPT}` : "") +
+   (toolSystemPrompt ? `\n${toolSystemPrompt}` : "") +
+   "'";

systemPromptEnv.push({
name: "SYSTEM_PROMPT",
@@ -53,17 +53,27 @@ async def content_generator(
        # Merge the chat response generator and the event generator
        combine = stream.merge(chat_response_generator, event_generator)
        is_stream_started = False
-       async with combine.stream() as streamer:
-           async for output in streamer:
-               if not is_stream_started:
-                   is_stream_started = True
-                   # Stream a blank message to start displaying the response in the UI
-                   yield cls.convert_text("")
-
-               yield output
-
-               if await request.is_disconnected():
-                   break
+       try:
+           async with combine.stream() as streamer:
+               async for output in streamer:
+                   if await request.is_disconnected():
+                       break
+
+                   if not is_stream_started:
+                       is_stream_started = True
+                       # Stream a blank message to start displaying the response in the UI
+                       yield cls.convert_text("")
+
+                   yield output
+       except Exception:
+           logger.exception("Error in stream response")
+           error_message = (
+               "An error occurred while processing your request. Please try again."
+           )
+           yield cls.convert_text(error_message)
+       finally:
+           # Ensure event handler is marked as done even if connection breaks
+           event_handler.is_done = True

    @classmethod
    async def _event_generator(cls, event_handler: EventCallbackHandler):
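
The pattern adopted above is: merge the token stream and the event stream, check for client disconnect before yielding, convert any exception into a final user-facing chunk, and use finally so the event handler is always marked done. Assuming the stream module used above is aiostream (which its merge/stream() usage suggests), here is a minimal, self-contained sketch of that pattern; the fake_tokens/fake_events generators and the printed cleanup are illustrative stand-ins, not template code.

import asyncio

from aiostream import stream  # provides stream.merge and the .stream() context


async def fake_tokens():
    for token in ["Hello", " ", "world"]:
        await asyncio.sleep(0.01)
        yield f"token:{token}"


async def fake_events():
    yield "event:started"
    await asyncio.sleep(0.02)
    yield "event:done"


async def content_generator():
    combined = stream.merge(fake_tokens(), fake_events())
    try:
        async with combined.stream() as streamer:
            async for output in streamer:
                yield output
    except Exception:
        # The template logs the exception and sends a user-facing error chunk.
        yield "error:An error occurred while processing your request."
    finally:
        # Mirrors `event_handler.is_done = True`: cleanup runs even if the
        # client disconnected or the stream raised.
        print("stream finished")


async def main():
    async for chunk in content_generator():
        print(chunk)


asyncio.run(main())

The key point is that the finally branch runs whether the loop finishes normally, the client disconnects, or the except branch fires, which is what keeps the event handler from hanging when a connection breaks mid-stream.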