Commit 1a63682

Better thought steps

1 parent 0fa5fca commit 1a63682

3 files changed, +11 -7 lines changed

src/backend/fastapi_app/rag_advanced.py

Lines changed: 3 additions & 3 deletions
@@ -103,7 +103,7 @@ async def prepare_context(
         thoughts = [
             ThoughtStep(
                 title="Prompt to generate search arguments",
-                description=[str(message) for message in query_messages],
+                description=query_messages,
                 props=(
                     {"model": self.chat_model, "deployment": self.chat_deployment}
                     if self.chat_deployment
@@ -154,7 +154,7 @@ async def answer(
             + [
                 ThoughtStep(
                     title="Prompt to generate answer",
-                    description=[str(message) for message in contextual_messages],
+                    description=contextual_messages,
                     props=(
                         {"model": self.chat_model, "deployment": self.chat_deployment}
                         if self.chat_deployment
@@ -191,7 +191,7 @@ async def answer_stream(
             + [
                 ThoughtStep(
                     title="Prompt to generate answer",
-                    description=[str(message) for message in contextual_messages],
+                    description=contextual_messages,
                     props=(
                         {"model": self.chat_model, "deployment": self.chat_deployment}
                         if self.chat_deployment

src/backend/fastapi_app/rag_simple.py

Lines changed: 2 additions & 2 deletions
@@ -102,7 +102,7 @@ async def answer(
             + [
                 ThoughtStep(
                     title="Prompt to generate answer",
-                    description=[str(message) for message in contextual_messages],
+                    description=contextual_messages,
                     props=(
                         {"model": self.chat_model, "deployment": self.chat_deployment}
                         if self.chat_deployment
@@ -139,7 +139,7 @@ async def answer_stream(
             + [
                 ThoughtStep(
                     title="Prompt to generate answer",
-                    description=[str(message) for message in contextual_messages],
+                    description=contextual_messages,
                     props=(
                         {"model": self.chat_model, "deployment": self.chat_deployment}
                         if self.chat_deployment
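
Both backend files make the same change: the chat messages are passed to ThoughtStep's description as-is instead of being stringified one by one. Below is a minimal sketch of the effect, assuming a pydantic-style ThoughtStep model with the title/description/props fields seen in the diff; the model definition and the sample messages are illustrative, not the repo's exact code.

# Minimal sketch, assuming a pydantic ThoughtStep roughly like the backend's
# API model; field names match the diff, everything else is illustrative.
from typing import Any

from pydantic import BaseModel


class ThoughtStep(BaseModel):
    title: str
    description: Any = None  # a string, a list of messages, or None
    props: dict = {}


# Hypothetical chat messages standing in for query_messages/contextual_messages.
query_messages = [
    {"role": "system", "content": "Generate search arguments."},
    {"role": "user", "content": "Find waterproof tents under $200."},
]

# Before: each message was flattened to its Python repr, so the description
# serialized as a list of opaque strings.
before = ThoughtStep(
    title="Prompt to generate search arguments",
    description=[str(message) for message in query_messages],
)

# After: the message objects pass through unchanged, so the description
# serializes as structured JSON objects.
after = ThoughtStep(
    title="Prompt to generate search arguments",
    description=query_messages,
)

print(before.model_dump_json(indent=2))
print(after.model_dump_json(indent=2))

Because the description is now a list of structured messages rather than a list of Python reprs, the frontend's Array.isArray(t.description) branch can JSON.stringify it into readable, highlightable JSON, which is what the ThoughtProcess.tsx change below renders.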

src/frontend/src/components/AnalysisPanel/ThoughtProcess.tsx

Lines changed: 6 additions & 2 deletions
@@ -1,10 +1,14 @@
 import { Stack } from "@fluentui/react";
-import SyntaxHighlighter from "react-syntax-highlighter";
+import { Light as SyntaxHighlighter } from "react-syntax-highlighter";
+import json from "react-syntax-highlighter/dist/esm/languages/hljs/json";
+import { a11yLight } from "react-syntax-highlighter/dist/esm/styles/hljs";

 import styles from "./AnalysisPanel.module.css";

 import { Thoughts } from "../../api";

+SyntaxHighlighter.registerLanguage("json", json);
+
 interface Props {
     thoughts: Thoughts[];
 }
@@ -25,7 +29,7 @@ export const ThoughtProcess = ({ thoughts }: Props) => {
                 ))}
             </Stack>
             {Array.isArray(t.description) ? (
-                <SyntaxHighlighter language="json" wrapLongLines className={styles.tCodeBlock}>
+                <SyntaxHighlighter language="json" wrapLongLines className={styles.tCodeBlock} style={a11yLight}>
                     {JSON.stringify(t.description, null, 2)}
                 </SyntaxHighlighter>
             ) : (
