feat: Add multi-agent financial report use case for TS #394

Merged
9 commits merged on Oct 25, 2024
28 changes: 27 additions & 1 deletion helpers/typescript.ts
@@ -1,7 +1,7 @@
import fs from "fs/promises";
import os from "os";
import path from "path";
-import { bold, cyan, yellow } from "picocolors";
+import { bold, cyan, red, yellow } from "picocolors";
import { assetRelocator, copy } from "../helpers/copy";
import { callPackageManager } from "../helpers/install";
import { templatesDir } from "./dir";
@@ -26,6 +26,7 @@ export const installTSTemplate = async ({
tools,
dataSources,
useLlamaParse,
agents,
}: InstallTemplateArgs & { backend: boolean }) => {
console.log(bold(`Using ${packageManager}.`));

@@ -132,6 +133,31 @@ export const installTSTemplate = async ({
cwd: path.join(multiagentPath, "workflow"),
});

// Copy agents use case code for multiagent template
if (agents) {
console.log("\nCopying agent:", agents, "\n");

const agentsCodePath = path.join(
compPath,
"agents",
"typescript",
agents,
);

await copy("**", path.join(root, relativeEngineDestPath, "workflow"), {
parents: true,
cwd: agentsCodePath,
rename: assetRelocator,
});
} else {
console.log(
red(
"There is no agent selected for multi-agent template. Please pick an agent to use via --agents flag.",
),
);
process.exit(1);
}

if (framework === "nextjs") {
// patch route.ts file
await copy("**", path.join(root, relativeEngineDestPath), {
30 changes: 13 additions & 17 deletions questions/simple.ts
@@ -47,23 +47,19 @@ export const askSimpleQuestions = async (
let useLlamaCloud = false;

if (appType !== "extractor") {
-// Default financial report agent use case only supports Python
-// TODO: Add support for Typescript frameworks
-if (appType !== "financial_report_agent") {
-const { language: newLanguage } = await prompts(
-{
-type: "select",
-name: "language",
-message: "What language do you want to use?",
-choices: [
-{ title: "Python (FastAPI)", value: "fastapi" },
-{ title: "Typescript (NextJS)", value: "nextjs" },
-],
-},
-questionHandlers,
-);
-language = newLanguage;
-}
+const { language: newLanguage } = await prompts(
+{
+type: "select",
+name: "language",
+message: "What language do you want to use?",
+choices: [
+{ title: "Python (FastAPI)", value: "fastapi" },
+{ title: "Typescript (NextJS)", value: "nextjs" },
+],
+},
+questionHandlers,
+);
+language = newLanguage;

const { useLlamaCloud: newUseLlamaCloud } = await prompts(
{
65 changes: 65 additions & 0 deletions templates/components/agents/typescript/financial_report/agents.ts
@@ -0,0 +1,65 @@
import { ChatMessage } from "llamaindex";
import { FunctionCallingAgent } from "./single-agent";
import { getQueryEngineTools, lookupTools } from "./tools";

export const createResearcher = async (
chatHistory: ChatMessage[],
params?: any,
) => {
const queryEngineTools = await getQueryEngineTools(params);

if (!queryEngineTools) {
throw new Error("Query engine tool not found");
}

return new FunctionCallingAgent({
name: "researcher",
tools: queryEngineTools,
systemPrompt: `You are a researcher agent. You are responsible for retrieving information from the corpus.
## Instructions:
+ Don't synthesize the information; just return all of the retrieved information.
+ If the needed information is already provided in the chat history, don't retrieve it again; instead respond with: "There is no new information, please reuse the information from the conversation."
`,
chatHistory,
});
};

export const createAnalyst = async (chatHistory: ChatMessage[]) => {
let systemPrompt = `You are an expert in analyzing financial data.
You are given a task and a set of financial data to analyze. Your task is to analyze the financial data and return a report.
Your response should include a detailed analysis of the financial data, including any trends, patterns, or insights that you find.
Present the analysis in a textual format; tables are a great way to structure it.
You don't need to synthesize the data; just analyze it and report your findings.
Always use the provided information; don't make up any information yourself.`;
const tools = await lookupTools(["interpreter"]);
if (tools.length > 0) {
systemPrompt = `${systemPrompt}
You are able to visualize the financial data using the code interpreter tool.
It's very useful to create and include visualizations in the report. Never include any code in the report, just the visualizations.`;
}
return new FunctionCallingAgent({
name: "analyst",
tools: tools,
chatHistory,
});
};

export const createReporter = async (chatHistory: ChatMessage[]) => {
const tools = await lookupTools(["document_generator"]);
let systemPrompt = `You are a report generation assistant tasked with producing a well-formatted report given parsed context.
Given a comprehensive analysis of the user request, your task is to synthesize the information and return a well-formatted report.

## Instructions
You are responsible for representing the analysis in a well-formatted report. If tables or visualizations are provided, add them to the most relevant sections.
Finally, the report should be presented in markdown format.`;
if (tools.length > 0) {
systemPrompt = `${systemPrompt}
You are also able to generate an HTML file of the report.`;
}
return new FunctionCallingAgent({
name: "reporter",
tools: tools,
systemPrompt: systemPrompt,
chatHistory,
});
};
159 changes: 159 additions & 0 deletions templates/components/agents/typescript/financial_report/factory.ts
@@ -0,0 +1,159 @@
import {
Context,
StartEvent,
StopEvent,
Workflow,
WorkflowEvent,
} from "@llamaindex/core/workflow";
import { Message } from "ai";
import { ChatMessage, ChatResponseChunk, Settings } from "llamaindex";
import { getAnnotations } from "../llamaindex/streaming/annotations";
import { createAnalyst, createReporter, createResearcher } from "./agents";
import { AgentInput, AgentRunEvent } from "./type";

const TIMEOUT = 360 * 1000;
const MAX_ATTEMPTS = 2;

class ResearchEvent extends WorkflowEvent<{ input: string }> {}
class AnalyzeEvent extends WorkflowEvent<{ input: string }> {}
class ReportEvent extends WorkflowEvent<{ input: string }> {}

const prepareChatHistory = (chatHistory: Message[]): ChatMessage[] => {
// By default, the chat history only contains the assistant and user messages;
// all the agent messages are stored in annotation data, which is not visible to the LLM.

const MAX_AGENT_MESSAGES = 10;
const agentAnnotations = getAnnotations<{ agent: string; text: string }>(
chatHistory,
{ role: "assistant", type: "agent" },
).slice(-MAX_AGENT_MESSAGES);

const agentMessages = agentAnnotations
.map(
(annotation) =>
`\n<${annotation.data.agent}>\n${annotation.data.text}\n</${annotation.data.agent}>`,
)
.join("\n");

const agentContent = agentMessages
? "Here is the previous conversation of agents:\n" + agentMessages
: "";

if (agentContent) {
const agentMessage: ChatMessage = {
role: "assistant",
content: agentContent,
};
return [
...chatHistory.slice(0, -1),
agentMessage,
chatHistory.slice(-1)[0],
] as ChatMessage[];
}
return chatHistory as ChatMessage[];
};

export const createWorkflow = (messages: Message[], params?: any) => {
const chatHistoryWithAgentMessages = prepareChatHistory(messages);
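// Run a single agent workflow, forwarding its AgentRunEvents to the parent
// context's stream while awaiting the final result.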
const runAgent = async (
context: Context,
agent: Workflow,
input: AgentInput,
) => {
const run = agent.run(new StartEvent({ input }));
for await (const event of agent.streamEvents()) {
if (event.data instanceof AgentRunEvent) {
context.writeEventToStream(event.data);
}
}
return await run;
};

const start = async (context: Context, ev: StartEvent) => {
context.set("task", ev.data.input);

const chatHistoryStr = chatHistoryWithAgentMessages
.map((msg) => `${msg.role}: ${msg.content}`)
.join("\n");

// Decision-making process
const decision = await decideWorkflow(ev.data.input, chatHistoryStr);

if (decision !== "publish") {
return new ResearchEvent({
input: `Research for this task: ${ev.data.input}`,
});
} else {
return new ReportEvent({
input: `Publish content based on the chat history\n${chatHistoryStr}\n\n and task: ${ev.data.input}`,
});
}
};

const decideWorkflow = async (task: string, chatHistoryStr: string) => {
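// Ask the LLM whether the request can be answered from the existing chat history ("publish")
// or needs new research first.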
const llm = Settings.llm;

const prompt = `You are an expert in decision-making, helping people write and publish blog posts.
If the user is asking for a file or to publish content, respond with 'publish'.
If the user requests to write or update a blog post, respond with 'not_publish'.

Here is the chat history:
${chatHistoryStr}

The current user request is:
${task}

Given the chat history and the new user request, decide whether to publish based on existing information.
Decision (respond with either 'not_publish' or 'publish'):`;

const output = await llm.complete({ prompt: prompt });
const decision = output.text.trim().toLowerCase();
return decision === "publish" ? "publish" : "research";
};

const research = async (context: Context, ev: ResearchEvent) => {
const researcher = await createResearcher(
chatHistoryWithAgentMessages,
params,
);
const researchRes = await runAgent(context, researcher, {
message: ev.data.input,
});
const researchResult = researchRes.data.result;
return new AnalyzeEvent({
input: `Write a blog post given this task: ${context.get("task")} using this research content: ${researchResult}`,
});
};

const analyze = async (context: Context, ev: AnalyzeEvent) => {
const analyst = await createAnalyst(chatHistoryWithAgentMessages);
const analyzeRes = await runAgent(context, analyst, {
message: ev.data.input,
});
return new ReportEvent({
input: `Publish content based on the chat history\n${analyzeRes.data.result}\n\n and task: ${ev.data.input}`,
});
};

const report = async (context: Context, ev: ReportEvent) => {
const reporter = await createReporter(chatHistoryWithAgentMessages);

const reportResult = await runAgent(context, reporter, {
message: `${ev.data.input}`,
streaming: true,
});
return reportResult as unknown as StopEvent<
AsyncGenerator<ChatResponseChunk>
>;
};

const workflow = new Workflow({ timeout: TIMEOUT, validate: true });
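// Wire up the steps: start -> research -> analyze -> report.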
workflow.addStep(StartEvent, start, {
outputs: [ResearchEvent, ReportEvent],
});
workflow.addStep(ResearchEvent, research, { outputs: AnalyzeEvent });
workflow.addStep(AnalyzeEvent, analyze, { outputs: ReportEvent });
workflow.addStep(ReportEvent, report, { outputs: StopEvent });

return workflow;
};
86 changes: 86 additions & 0 deletions templates/components/agents/typescript/financial_report/tools.ts
@@ -0,0 +1,86 @@
import fs from "fs/promises";
import { BaseToolWithCall, LlamaCloudIndex, QueryEngineTool } from "llamaindex";
import path from "path";
import { getDataSource } from "../engine";
import { createTools } from "../engine/tools/index";

export const getQueryEngineTools = async (
params?: any,
): Promise<QueryEngineTool[] | null> => {
Comment on lines +7 to +9

🛠️ Refactor suggestion

Define a specific type instead of using 'any' for 'params'

Using any for the params parameter reduces type safety and can lead to potential bugs. Consider defining a specific type or interface for params to enhance type checking and maintainability.
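For illustration, one possible shape (the field names below are placeholders rather than the template's actual contract, since params is only forwarded to getDataSource):

// Hypothetical shape for the request-scoped parameters forwarded to
// getDataSource; adjust the fields to whatever the engine actually reads.
interface QueryEngineParams {
  llamaCloudPipeline?: { pipeline: string; project: string };
  filters?: Record<string, unknown>;
}

// The signature would then become:
// export const getQueryEngineTools = async (
//   params?: QueryEngineParams,
// ): Promise<QueryEngineTool[] | null> => { ... };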

const topK = process.env.TOP_K ? parseInt(process.env.TOP_K) : undefined;

const index = await getDataSource(params);
if (!index) {
return null;
}
// If the index is a LlamaCloudIndex, use two query engine tools
if (index instanceof LlamaCloudIndex) {
return [
new QueryEngineTool({
queryEngine: index.asQueryEngine({
similarityTopK: topK,
retrieval_mode: "files_via_content",
}),
metadata: {
name: "document_retriever",
description: `Document retriever that retrieves entire documents from the corpus.
ONLY use for research questions that may require searching over entire research reports.
Will be slower and more expensive than chunk-level retrieval but may be necessary.`,
},
}),
new QueryEngineTool({
queryEngine: index.asQueryEngine({
similarityTopK: topK,
retrieval_mode: "chunks",
}),
metadata: {
name: "chunk_retriever",
description: `Retrieves a small set of relevant document chunks from the corpus.
Use for research questions that want to look up specific facts from the knowledge corpus,
and don't need entire documents.`,
},
}),
];
} else {
return [
new QueryEngineTool({
queryEngine: (index as any).asQueryEngine({

🛠️ Refactor suggestion

Avoid using 'as any' with type assertions

Using as any bypasses TypeScript's type checking and can mask potential issues. Instead, refine the type of index or update type definitions so that index includes the asQueryEngine method. This will enhance type safety and maintainability.
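A minimal sketch of that refinement, assuming the non-LlamaCloud branch always receives a VectorStoreIndex (an assumption about the engine component, not something this PR states); once the parameter is typed as a union, TypeScript narrows each branch and no cast is needed:

import {
  LlamaCloudIndex,
  QueryEngineTool,
  VectorStoreIndex,
} from "llamaindex";

// Hypothetical helper; the real getDataSource lives in ../engine and may
// return a different type.
const buildRetrieverTool = (
  index: LlamaCloudIndex | VectorStoreIndex,
  topK?: number,
): QueryEngineTool => {
  if (index instanceof LlamaCloudIndex) {
    return new QueryEngineTool({
      queryEngine: index.asQueryEngine({
        similarityTopK: topK,
        retrieval_mode: "chunks",
      }),
      metadata: {
        name: "chunk_retriever",
        description: "Retrieves relevant document chunks from the corpus.",
      },
    });
  }
  // TypeScript has narrowed `index` to VectorStoreIndex here, so no `as any`.
  return new QueryEngineTool({
    queryEngine: index.asQueryEngine({ similarityTopK: topK }),
    metadata: {
      name: "retriever",
      description:
        "Use this tool to retrieve information about the text corpus from the index.",
    },
  });
};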

similarityTopK: topK,
}),
metadata: {
name: "retriever",
description: `Use this tool to retrieve information about the text corpus from the index.`,
},
}),
];
}
};

export const getAvailableTools = async () => {
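// Combine the tools configured in config/tools.json with the query engine tools built from the data source.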
const configFile = path.join("config", "tools.json");
let toolConfig: any;
const tools: BaseToolWithCall[] = [];
try {
toolConfig = JSON.parse(await fs.readFile(configFile, "utf8"));
} catch (e) {
console.info(`Could not read ${configFile} file. Using no tools.`);
}
if (toolConfig) {
tools.push(...(await createTools(toolConfig)));
}
const queryEngineTools = await getQueryEngineTools();
if (queryEngineTools) {
tools.push(...queryEngineTools);
}

return tools;
};

export const lookupTools = async (
toolNames: string[],
): Promise<BaseToolWithCall[]> => {
const availableTools = await getAvailableTools();
return availableTools.filter((tool) =>
toolNames.includes(tool.metadata.name),
);
};