chore: bump ai v4 #449

Merged (15 commits) on Nov 27, 2024
5 changes: 5 additions & 0 deletions .changeset/chilly-eels-retire.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+chore: bump ai v4
@@ -115,7 +115,9 @@ export class InterpreterTool implements BaseTool<InterpreterParameter> {
       for (const filePath of input.sandboxFiles) {
         const fileName = path.basename(filePath);
         const localFilePath = path.join(this.uploadedFilesDir, fileName);
-        const content = fs.readFileSync(localFilePath);
+        const content = fs.readFileSync(
+          localFilePath,
+        ) as unknown as ArrayBuffer;
         await this.codeInterpreter?.files.write(filePath, content);
       }
     } catch (error) {
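The cast above works around a typing mismatch rather than a behavior change: `fs.readFileSync` returns a Node `Buffer`, while the sandbox SDK's `files.write` signature now expects `ArrayBuffer`-compatible data. A minimal sketch of the same idea, with a hypothetical file path; the copy variant at the end is an alternative, not what the PR does:

```ts
import fs from "node:fs";

// A Buffer is a Uint8Array view over an underlying ArrayBuffer, so the double
// cast only satisfies the type checker; the bytes pass through unchanged.
const buf = fs.readFileSync("output/uploaded/report.csv"); // hypothetical path
const viaCast = buf as unknown as ArrayBuffer;

// Stricter alternative (an assumption, not what the PR does): copy the bytes
// into a standalone ArrayBuffer, avoiding any shared Buffer pool memory.
const viaCopy = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
```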
8 changes: 3 additions & 5 deletions templates/components/multiagent/typescript/nextjs/route.ts
@@ -1,5 +1,5 @@
 import { initObservability } from "@/app/observability";
-import { StreamingTextResponse, type Message } from "ai";
+import { LlamaIndexAdapter, type Message } from "ai";
 import { NextRequest, NextResponse } from "next/server";
 import { initSettings } from "./engine/settings";
 import {
@@ -39,11 +39,9 @@ export async function POST(request: NextRequest) {
       message: userMessageContent,
       streaming: true,
     });
-    const { stream, dataStream } =
+    const { stream, dataStream: data } =
       await createStreamFromWorkflowContext(context);
-
-    // Return the two streams in one response
-    return new StreamingTextResponse(stream, {}, dataStream);
+    return LlamaIndexAdapter.toDataStreamResponse(stream, { data });
   } catch (error) {
     console.error("[LlamaIndex]", error);
     return NextResponse.json(
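In ai v4, `StreamingTextResponse` is gone; the route now hands the workflow stream to `LlamaIndexAdapter.toDataStreamResponse` and passes the `StreamData` side channel through the `data` option, so one `Response` carries both the text stream and the extra data events. A minimal sketch of that shape, with `getWorkflowStream` as a hypothetical stand-in for the context setup above:

```ts
import { LlamaIndexAdapter, StreamData } from "ai";
import { type EngineResponse } from "llamaindex";
import { NextRequest, NextResponse } from "next/server";

// Hypothetical helper standing in for createStreamFromWorkflowContext.
declare function getWorkflowStream(
  request: NextRequest,
): Promise<{ stream: AsyncIterable<EngineResponse>; dataStream: StreamData }>;

export async function POST(request: NextRequest) {
  try {
    const { stream, dataStream: data } = await getWorkflowStream(request);
    // The adapter serializes text deltas and StreamData into one response.
    return LlamaIndexAdapter.toDataStreamResponse(stream, { data });
  } catch (error) {
    console.error("[LlamaIndex]", error);
    return NextResponse.json({ detail: "stream failed" }, { status: 500 });
  }
}
```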
42 changes: 34 additions & 8 deletions templates/components/multiagent/typescript/workflow/stream.ts
@@ -6,15 +6,14 @@ import {
 import {
   StreamData,
   createStreamDataTransformer,
-  trimStartOfStreamHelper,
+  parseDataStreamPart,
 } from "ai";
-import { ChatResponseChunk } from "llamaindex";
+import { ChatResponseChunk, EngineResponse } from "llamaindex";
 import { AgentRunEvent } from "./type";

 export async function createStreamFromWorkflowContext<Input, Output, Context>(
   context: WorkflowContext<Input, Output, Context>,
-): Promise<{ stream: ReadableStream<string>; dataStream: StreamData }> {
-  const trimStartOfStream = trimStartOfStreamHelper();
+): Promise<{ stream: AsyncIterable<EngineResponse>; dataStream: StreamData }> {
   const dataStream = new StreamData();
   const encoder = new TextEncoder();
   let generator: AsyncGenerator<ChatResponseChunk> | undefined;
@@ -46,15 +45,20 @@ export async function createStreamFromWorkflowContext<Input, Output, Context>(
           closeStreams(controller);
           return;
         }
-        const text = trimStartOfStream(chunk.delta ?? "");
-        if (text) {
-          controller.enqueue(encoder.encode(text));
+        if (chunk.delta) {
+          controller.enqueue(encoder.encode(chunk.delta));
         }
       },
     });

+  const stream = mainStream
+    .pipeThrough(createStreamDataTransformer())
+    .pipeThrough(new TextDecoderStream());
+
+  const streamIterable = streamToAsyncIterable(stream);
+
   return {
-    stream: mainStream.pipeThrough(createStreamDataTransformer()),
+    stream: streamIterable,
     dataStream,
   };
 }
@@ -75,3 +79,25 @@ function handleEvent(
     });
   }
 }
+
+function streamToAsyncIterable(stream: ReadableStream<string>) {
+  const streamIterable: AsyncIterable<EngineResponse> = {
+    [Symbol.asyncIterator]() {
+      const reader = stream.getReader();
+      return {
+        async next() {
+          const { done, value } = await reader.read();
+          if (done) {
+            return { done: true, value: undefined };
+          }
+          const delta = parseDataStreamPart(value)?.value.toString() || "";
+          return {
+            done: false,
+            value: { delta } as unknown as EngineResponse,
+          };
+        },
+      };
+    },
+  };
+  return streamIterable;
+}
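The new `streamToAsyncIterable` helper bridges an API gap: the workflow produces a web `ReadableStream<string>` of data-stream parts, while `LlamaIndexAdapter.toDataStreamResponse` consumes an async iterable of `EngineResponse`-shaped chunks, so each part is re-parsed with `parseDataStreamPart` back into a `{ delta }` object. A hedged usage sketch, assuming the workflow's `closeStreams` helper takes care of closing `dataStream` when the run finishes:

```ts
// Drain the iterable and print each text delta as it arrives. The `context`
// value is hypothetical; any WorkflowContext from the workflow works here.
const { stream, dataStream } = await createStreamFromWorkflowContext(context);
for await (const chunk of stream) {
  process.stdout.write(chunk.delta ?? "");
}
```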
2 changes: 1 addition & 1 deletion templates/types/streaming/express/package.json
@@ -16,7 +16,7 @@
"lint": "eslint ."
},
"dependencies": {
"ai": "3.3.42",
"ai": "4.0.3",
"cors": "^2.8.5",
"dotenv": "^16.3.1",
"duck-duck-scrape": "^2.2.5",
@@ -1,4 +1,4 @@
-import { LlamaIndexAdapter, Message, StreamData, streamToResponse } from "ai";
+import { LlamaIndexAdapter, Message, StreamData } from "ai";
 import { Request, Response } from "express";
 import { ChatMessage, Settings } from "llamaindex";
 import { createChatEngine } from "./engine/chat";
@@ -43,7 +43,7 @@ export const chat = async (req: Request, res: Response) => {
       });
     });

-    const onFinal = (content: string) => {
+    const onCompletion = (content: string) => {
       chatHistory.push({ role: "assistant", content: content });
       generateNextQuestions(chatHistory)
         .then((questions: string[]) => {
@@ -59,8 +59,21 @@ export const chat = async (req: Request, res: Response) => {
         });
     };

-    const stream = LlamaIndexAdapter.toDataStream(response, { onFinal });
-    return streamToResponse(stream, res, {}, vercelStreamData);
+    const streamResponse = LlamaIndexAdapter.toDataStreamResponse(response, {
+      data: vercelStreamData,
+      callbacks: { onCompletion },
+    });
+    if (streamResponse.body) {
+      const reader = streamResponse.body.getReader();
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done) {
+          res.end();
+          return;
+        }
+        res.write(value);
+      }
+    }
   } catch (error) {
     console.error("[LlamaIndex]", error);
     return res.status(500).json({
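ai v4 also removed `streamToResponse`, so the Express handler above copies the web `Response` body into `res` by hand. The same loop extracted into a reusable helper (a sketch, not part of the PR; it deliberately skips copying status and headers, which the handler above also leaves at their defaults):

```ts
import { type Response } from "express";

// Pipe a web Response body (as produced by toDataStreamResponse) into an
// Express response, chunk by chunk.
async function pipeToExpress(webResponse: globalThis.Response, res: Response) {
  if (!webResponse.body) {
    res.end();
    return;
  }
  const reader = webResponse.body.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break; // stream finished; flush and close below
    res.write(value); // value is a Uint8Array chunk of the data stream
  }
  res.end();
}
```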
4 changes: 2 additions & 2 deletions templates/types/streaming/nextjs/app/api/chat/route.ts
@@ -56,7 +56,7 @@ export async function POST(request: NextRequest) {
       });
     });

-    const onFinal = (content: string) => {
+    const onCompletion = (content: string) => {
       chatHistory.push({ role: "assistant", content: content });
       generateNextQuestions(chatHistory)
         .then((questions: string[]) => {
@@ -74,7 +74,7 @@

     return LlamaIndexAdapter.toDataStreamResponse(response, {
       data: vercelStreamData,
-      callbacks: { onFinal },
+      callbacks: { onCompletion },
     });
   } catch (error) {
     console.error("[LlamaIndex]", error);
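The only substantive change in this route is the callback rename: ai v4 calls the end-of-stream hook `onCompletion` where v3's adapter options used `onFinal`. Isolated into a small function for clarity (a sketch reusing the names from the route above, with loose types assumed):

```ts
import { LlamaIndexAdapter, StreamData } from "ai";
import { type ChatMessage, type EngineResponse } from "llamaindex";

function toStreamResponse(
  response: AsyncIterable<EngineResponse>,
  vercelStreamData: StreamData,
  chatHistory: ChatMessage[],
) {
  // Runs once streaming finishes, with the full assistant message text.
  const onCompletion = (content: string) => {
    chatHistory.push({ role: "assistant", content });
  };
  return LlamaIndexAdapter.toDataStreamResponse(response, {
    data: vercelStreamData,
    callbacks: { onCompletion },
  });
}
```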
4 changes: 3 additions & 1 deletion templates/types/streaming/nextjs/app/api/sandbox/route.ts
@@ -90,7 +90,9 @@ export async function POST(req: Request) {
     artifact.files.forEach(async (sandboxFilePath) => {
       const fileName = path.basename(sandboxFilePath);
       const localFilePath = path.join("output", "uploaded", fileName);
-      const fileContent = await fs.readFile(localFilePath);
+      const fileContent = (await fs.readFile(
+        localFilePath,
+      )) as unknown as ArrayBuffer;

       await sbx.files.write(sandboxFilePath, fileContent);
       console.log(`Copied file to ${sandboxFilePath} in ${sbx.sandboxID}`);
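One caveat in the surrounding template code, unchanged by this PR: `artifact.files.forEach(async …)` fires the writes without awaiting them, so the handler can continue before every file has landed in the sandbox. A sequential sketch, assuming `sbx.files.write` returns a promise as used above and reusing the handler's names:

```ts
// Await each copy before starting the next one, instead of fire-and-forget.
for (const sandboxFilePath of artifact.files) {
  const fileName = path.basename(sandboxFilePath);
  const localFilePath = path.join("output", "uploaded", fileName);
  const fileContent = (await fs.readFile(localFilePath)) as unknown as ArrayBuffer;
  await sbx.files.write(sandboxFilePath, fileContent);
}
```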
4 changes: 2 additions & 2 deletions templates/types/streaming/nextjs/package.json
@@ -16,8 +16,8 @@
"@radix-ui/react-select": "^2.1.1",
"@radix-ui/react-slot": "^1.0.2",
"@radix-ui/react-tabs": "^1.1.0",
"@llamaindex/chat-ui": "0.0.9",
"ai": "3.4.33",
"@llamaindex/chat-ui": "0.0.11",
"ai": "4.0.3",
"ajv": "^8.12.0",
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",