Skip to content

Commit

Permalink
Merge branch 'Shonen-Labs:main' into ft/memorydb
Browse files Browse the repository at this point in the history
  • Loading branch information
tosoham authored Jan 21, 2025
2 parents 85d5f55 + dbb2086 commit d035041
Show file tree
Hide file tree
Showing 6 changed files with 123 additions and 755 deletions.
15 changes: 10 additions & 5 deletions .github/workflows/ci-cd.yml
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ jobs:
env:
NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

BRIAN_API_KEY: ${{ secrets.BRIAN_API_KEY }}

- name: Cache build output
uses: actions/cache@v4
Expand Down Expand Up @@ -99,12 +99,17 @@ jobs:
env:
NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
BRIAN_API_KEY: ${{ secrets.BRIAN_API_KEY }}

# Deploy to your hosting platform
# Example for Vercel:
- name: Deploy to Vercel
run: npx vercel --prod --token ${{ secrets.VERCEL_TOKEN }}
run: |
if [ ${{ github.event_name }} == 'pull_request' ]; then
npx vercel --token ${{ secrets.VERCEL_TOKEN }}
else
npx vercel --prod --token ${{ secrets.VERCEL_TOKEN }}
fi
env:
VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }}
VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,8 @@ The application's architecture follows a multi-agent design, where each agent sp
```
npm run dev
```

### Setting up Telegram Test Environment
- Please go through this [doc](https://docs.ton.org/v3/guidelines/dapps/tma/guidelines/testing-apps) to set up the Telegram mini app test environment
## Using StarkFinder

### Available Agents
Expand Down
129 changes: 77 additions & 52 deletions client/app/api/ask/route.ts
Original file line number Diff line number Diff line change
@@ -1,74 +1,89 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
// api/ask/route.ts
import { NextResponse } from 'next/server';
import { NextResponse } from "next/server";
import { ASK_OPENAI_AGENT_PROMPT } from "@/prompts/prompts";
import axios from 'axios';
import axios from "axios";
import { ChatOpenAI } from "@langchain/openai";
import { ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate } from "@langchain/core/prompts";
import { START, END, MessagesAnnotation, MemorySaver, StateGraph } from "@langchain/langgraph";
import {
ChatPromptTemplate,
SystemMessagePromptTemplate,
HumanMessagePromptTemplate,
} from "@langchain/core/prompts";
import {
START,
END,
MessagesAnnotation,
MemorySaver,
StateGraph,
} from "@langchain/langgraph";
import { RemoveMessage } from "@langchain/core/messages";

const BRIAN_API_KEY = process.env.BRIAN_API_KEY || '';
const OPENAI_API_KEY = process.env.OPENAI_API_KEY || '';
const BRIAN_API_URL = 'https://api.brianknows.org/api/v0/agent/knowledge';
const BRIAN_DEFAULT_RESPONSE: string = "🤖 Sorry, I don’t know how to answer. The AskBrian feature allows you to ask for information on a custom-built knowledge base of resources. Contact the Brian team if you want to add new resources!";

const systemPrompt = ASK_OPENAI_AGENT_PROMPT + `\nThe provided chat history includes a summary of the earlier conversation.`;
const BRIAN_API_KEY = process.env.BRIAN_API_KEY || "";
const OPENAI_API_KEY = process.env.OPENAI_API_KEY || "";
const BRIAN_API_URL = "https://api.brianknows.org/api/v0/agent/knowledge";
const BRIAN_DEFAULT_RESPONSE: string =
"🤖 Sorry, I don’t know how to answer. The AskBrian feature allows you to ask for information on a custom-built knowledge base of resources. Contact the Brian team if you want to add new resources!";

const systemMessage = SystemMessagePromptTemplate.fromTemplate([
systemPrompt
]);
const systemPrompt =
ASK_OPENAI_AGENT_PROMPT +
`\nThe provided chat history includes a summary of the earlier conversation.`;

const userMessage = HumanMessagePromptTemplate.fromTemplate([
"{user_query}"
]);
const systemMessage = SystemMessagePromptTemplate.fromTemplate([systemPrompt]);

const userMessage = HumanMessagePromptTemplate.fromTemplate(["{user_query}"]);

const askAgentPromptTemplate = ChatPromptTemplate.fromMessages([
systemMessage,
userMessage
userMessage,
]);

if (!OPENAI_API_KEY) {
throw new Error("OpenAI API key is missing");
}

const agent = new ChatOpenAI({
modelName: "gpt-4o",
temperature: 0.5,
openAIApiKey: OPENAI_API_KEY
openAIApiKey: OPENAI_API_KEY,
});
const prompt = askAgentPromptTemplate;
// const chain = prompt.pipe(agent);
const initialCallModel = async (state: typeof MessagesAnnotation.State) => {
const messages = [
await systemMessage.format({brianai_answer: BRIAN_DEFAULT_RESPONSE}),
...state.messages
await systemMessage.format({ brianai_answer: BRIAN_DEFAULT_RESPONSE }),
...state.messages,
];
const response = await agent.invoke(messages);
return { messages: response };
};
const callModel = async (state: typeof MessagesAnnotation.State ) => {
const callModel = async (state: typeof MessagesAnnotation.State) => {
const messageHistory = state.messages.slice(0, -1);
if ( messageHistory.length >= 3 ) {
if (messageHistory.length >= 3) {
const lastHumanMessage = state.messages[state.messages.length - 1];
const summaryPrompt = `
Distill the above chat messages into a single summary message.
Include as many specific details as you can.
IMPORTANT NOTE: Include all information related to user's nature about trading and what kind of trader he/she is.
`;
// const summaryMessage = HumanMessagePromptTemplate.fromTemplate([summaryPrompt]);
const summary = await agent.invoke([
const summary = await agent.invoke([
...messageHistory,
{ role: "user", content: summaryPrompt },
]);
const deleteMessages = state.messages.map(
(m) => m.id ? new RemoveMessage({ id: m.id }) : null
const deleteMessages = state.messages.map((m) =>
m.id ? new RemoveMessage({ id: m.id }) : null
);
const humanMessage = { role: "user", content: lastHumanMessage.content };
const response = await agent.invoke([
await systemMessage.format({brianai_answer: BRIAN_DEFAULT_RESPONSE}),
await systemMessage.format({ brianai_answer: BRIAN_DEFAULT_RESPONSE }),
summary,
humanMessage,
]);
//console.log(response);
return {
messages: [summary, humanMessage, response, ...deleteMessages],
};
};
} else {
return await initialCallModel(state);
}
Expand All @@ -80,25 +95,32 @@ const workflow = new StateGraph(MessagesAnnotation)
.addEdge("model", END);
const app = workflow.compile({ checkpointer: new MemorySaver() });

async function queryOpenAI({userQuery, brianaiResponse}:
{userQuery: string, brianaiResponse: string}):
Promise<string> {
async function queryOpenAI({
userQuery,
brianaiResponse,
}: {
userQuery: string;
brianaiResponse: string;
}): Promise<string> {
try {
const response = await app.invoke(
{
messages: [
await prompt.format({brianai_answer: brianaiResponse, user_query: userQuery})
await prompt.format({
brianai_answer: brianaiResponse,
user_query: userQuery,
}),
],
},
{
configurable: { thread_id: "1" },
},
}
);
console.log(response);
return response.messages[response.messages.length-1].content as string;
return response.messages[response.messages.length - 1].content as string;
} catch (error) {
console.error('OpenAI Error:', error);
return 'Sorry, I am unable to process your request at the moment.';
console.error("OpenAI Error:", error);
return "Sorry, I am unable to process your request at the moment.";
}
}

Expand All @@ -108,17 +130,20 @@ async function queryBrianAI(prompt: string): Promise<string> {
BRIAN_API_URL,
{
prompt,
kb: "starknet_kb"
kb: "starknet_kb",
},
{
headers: {
"Content-Type": "application/json",
"x-brian-api-key": BRIAN_API_KEY,
}
},
}
);
const brianaiAnswer = response.data.result.answer;
const openaiAnswer = await queryOpenAI({brianaiResponse: brianaiAnswer, userQuery: prompt});
const openaiAnswer = await queryOpenAI({
brianaiResponse: brianaiAnswer,
userQuery: prompt,
});
return openaiAnswer;
} catch (error) {
console.error("Brian AI Error:", error);
Expand All @@ -129,16 +154,16 @@ async function queryBrianAI(prompt: string): Promise<string> {
export async function POST(request: Request) {
try {
const { prompt, address, messages } = await request.json();

// Filter out duplicate messages and only keep user messages
const uniqueMessages = messages
.filter((msg: any) => msg.sender === "user")
.reduce((acc: any[], curr: any) => {
// Only add if message content isn't already present
if (!acc.some(msg => msg.content === curr.content)) {
if (!acc.some((msg) => msg.content === curr.content)) {
acc.push({
sender: "user",
content: curr.content
content: curr.content,
});
}
return acc;
Expand All @@ -148,34 +173,34 @@ export async function POST(request: Request) {
prompt,
address: address || "0x0",
chainId: "4012",
messages: uniqueMessages
messages: uniqueMessages,
};

console.log('Request payload:', JSON.stringify(payload, null, 2));
console.log("Request payload:", JSON.stringify(payload, null, 2));

const response = await queryBrianAI(payload.prompt);
console.log('API Response:', response);

console.log("API Response:", response);

// Extract the answer from the result array
if (response) {
return NextResponse.json({ answer: response });
} else {
throw new Error('Unexpected API response format');
throw new Error("Unexpected API response format");
}
} catch (error: any) {
console.error('Detailed error:', {
console.error("Detailed error:", {
message: error.message,
response: error.response?.data,
status: error.response?.status,
});

return NextResponse.json(
{
error: 'Unable to get response from Brian\'s API',
details: error.response?.data || error.message
},
{
error: "Unable to get response from Brian's API",
details: error.response?.data || error.message,
},
{ status: error.response?.status || 500 }
);
}
}
}
Loading

0 comments on commit d035041

Please sign in to comment.