Skip to content

Commit

Permalink
Make it so navigating 1 page back closes open dialogs
Browse files Browse the repository at this point in the history
  • Loading branch information
vemonet committed Jan 21, 2025
1 parent f7f509e commit d13eacd
Show file tree
Hide file tree
Showing 8 changed files with 58 additions and 48 deletions.
1 change: 1 addition & 0 deletions chat-with-context/demo/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,7 @@ <h2 class="text-xl text-center font-semibold border-b">
feedback-endpoint="{{ feedback_endpoint }}"
api-key="{{ api_key }}"
examples="{{ examples }}"
model="gpt-4o"
>
</chat-with-context>
</div>
Expand Down
1 change: 1 addition & 0 deletions chat-with-context/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@ <h2 class="text-xl text-center font-semibold border-b pb-2">
chat-endpoint="http://localhost:8000/chat"
feedback-endpoint="http://localhost:8000/feedback"
api-key="%EXPASY_API_KEY%"
model="gpt-4o-mini"
examples="Which resources are available at the SIB?,How can I get the HGNC symbol for the protein P68871?,What are the rat orthologs of the human TP53?,Where is expressed the gene ACE2 in human?,Anatomical entities where the INS zebrafish gene is expressed and its gene GO annotations,List the genes in primates orthologous to genes expressed in the fruit fly eye"
></chat-with-context>
</div>
Expand Down
4 changes: 2 additions & 2 deletions chat-with-context/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion chat-with-context/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@sib-swiss/chat-with-context",
"version": "0.0.10",
"version": "0.0.11",
"description": "A web component to easily deploy a chat with context.",
"license": "MIT",
"author": {
Expand Down
80 changes: 44 additions & 36 deletions chat-with-context/src/chat-with-context.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -24,15 +24,16 @@ import {streamResponse, ChatState} from "./providers";
* Custom element to create a chat interface with a context-aware assistant.
* @example <chat-with-context api="http://localhost:8000/"></chat-with-context>
*/
customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "", feedbackEndpoint: ""}, props => {
customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "", feedbackEndpoint: "", model: "gpt-4o"}, props => {
noShadowDOM();
hljs.registerLanguage("ttl", hljsDefineTurtle);
hljs.registerLanguage("sparql", hljsDefineSparql);

const [warningMsg, setWarningMsg] = createSignal("");
const [loading, setLoading] = createSignal(false);
const [feedbackSent, setFeedbackSent] = createSignal(false);
const [selectedTab, setSelectedTab] = createSignal("");
const [selectedDocsTab, setSelectedDocsTab] = createSignal("");
const [dialogOpen, setDialogOpen] = createSignal("");

const [feedbackEndpoint, setFeedbackEndpoint] = createSignal("");

Expand All @@ -41,13 +42,15 @@ customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "",
apiUrl: props.chatEndpoint,
// eslint-disable-next-line solid/reactivity
apiKey: props.apiKey,
model: "gpt-4o-mini",
// eslint-disable-next-line solid/reactivity
model: props.model,
});
let chatContainerEl!: HTMLDivElement;
let inputTextEl!: HTMLTextAreaElement;
// eslint-disable-next-line solid/reactivity
const examples = props.examples.split(",").map(value => value.trim());

// Fix the height of the chat input
createEffect(() => {
if (props.chatEndpoint === "") setWarningMsg("Please provide an API URL for the chat component to work.");

Expand All @@ -58,6 +61,30 @@ customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "",
// setFeedbackEndpoint(props.feedbackEndpoint.endsWith("/") ? props.feedbackEndpoint : props.feedbackEndpoint + "/");
});

const openDialog = (dialogId: string) => {
setDialogOpen(dialogId);
(document.getElementById(dialogId) as HTMLDialogElement).showModal();
history.pushState({ dialogOpen: true }, '');
highlightAll();
}

const closeDialog = () => {
(document.getElementById(dialogOpen()) as HTMLDialogElement).close()
setDialogOpen("");
// history.back();
}

// Close open dialogs with the browser navigation "go back one page" button
createEffect(() => {
window.addEventListener('popstate', (event) => {
if (dialogOpen()) {
event.preventDefault();
closeDialog();
}
});
});


const highlightAll = () => {
document.querySelectorAll("pre code:not(.hljs)").forEach(block => {
hljs.highlightElement(block as HTMLElement);
Expand All @@ -67,10 +94,7 @@ customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "",
// Send the user input to the chat API
async function submitInput(question: string) {
if (!question.trim()) return;
if (loading()) {
// setWarningMsg("⏳ Thinking...");
return;
}
if (loading()) return;
inputTextEl.value = "";
setLoading(true);
setWarningMsg("");
Expand All @@ -80,7 +104,7 @@ customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "",
await streamResponse(state, question);
} catch (error) {
if (error instanceof Error && error.name !== "AbortError") {
console.error("An error occurred when querying the API", error);
console.error("An error occurred when querying the API:", error);
setWarningMsg("An error occurred when querying the API. Please try again or contact an admin.");
}
}
Expand Down Expand Up @@ -125,33 +149,26 @@ customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "",
{(step, iStep) =>
step.retrieved_docs.length > 0 ? (
<>
{/* Add reference docs dialog */}
{/* Dialog to show more details about retrieved documents */}
<button
class="text-gray-400 ml-8 mb-4"
title={`Click to see the documents used to generate the response\n\nNode: ${step.node_id}`}
onClick={() => {
(
document.getElementById(`source-dialog-${iMsg()}-${iStep()}`) as HTMLDialogElement
).showModal();
setSelectedTab(step.retrieved_docs[0].metadata.doc_type);
highlightAll();
setSelectedDocsTab(step.retrieved_docs[0].metadata.doc_type);
openDialog(`step-dialog-${iMsg()}-${iStep()}`);
}}
>
{step.label}
</button>
<dialog
id={`source-dialog-${iMsg()}-${iStep()}`}
id={`step-dialog-${iMsg()}-${iStep()}`}
class="bg-white dark:bg-gray-800 m-3 rounded-3xl shadow-md w-full"
>
<button
id={`close-dialog-${iMsg()}-${iStep()}`}
class="fixed top-2 right-8 m-3 px-2 text-xl text-slate-500 bg-gray-200 dark:bg-gray-700 rounded-3xl"
title="Close references"
onClick={() =>
(
document.getElementById(`source-dialog-${iMsg()}-${iStep()}`) as HTMLDialogElement
).close()
}
title="Close documents details"
onClick={() => closeDialog()}
>
<img src={xLogo} alt="Close the dialog" class="iconBtn" />
</button>
Expand All @@ -161,12 +178,12 @@ customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "",
{docType => (
<button
class={`px-4 py-2 rounded-lg transition-all ${
selectedTab() === docType
selectedDocsTab() === docType
? "bg-gray-600 text-white shadow-md"
: "bg-gray-200 text-gray-700 hover:bg-gray-300"
}`}
onClick={() => {
setSelectedTab(docType);
setSelectedDocsTab(docType);
highlightAll();
}}
title="Show only this type of document"
Expand All @@ -176,7 +193,7 @@ customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "",
)}
</For>
</div>
<For each={step.retrieved_docs.filter(doc => doc.metadata.doc_type === selectedTab())}>
<For each={step.retrieved_docs.filter(doc => doc.metadata.doc_type === selectedDocsTab())}>
{(doc, iDoc) => (
<>
<p>
Expand Down Expand Up @@ -206,16 +223,11 @@ customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "",
</>
) : step.details ? (
<>
{/* Dialog to show more details in markdown */}
{/* Dialog to show more details about a step in markdown */}
<button
class="text-gray-400 ml-8 mb-4"
title={`Click to see the documents used to generate the response\n\nNode: ${step.node_id}`}
onClick={() => {
(
document.getElementById(`step-dialog-${iMsg()}-${iStep()}`) as HTMLDialogElement
).showModal();
highlightAll();
}}
onClick={() => {openDialog(`step-dialog-${iMsg()}-${iStep()}`)}}
>
{step.label}
</button>
Expand All @@ -227,11 +239,7 @@ customElement("chat-with-context", {chatEndpoint: "", examples: "", apiKey: "",
id={`close-dialog-${iMsg()}-${iStep()}`}
class="fixed top-2 right-8 m-3 px-2 text-xl text-slate-500 bg-gray-200 dark:bg-gray-700 rounded-3xl"
title="Close step details"
onClick={() =>
(
document.getElementById(`step-dialog-${iMsg()}-${iStep()}`) as HTMLDialogElement
).close()
}
onClick={() => closeDialog()}
>
<img src={xLogo} alt="Close the dialog" class="iconBtn" />
</button>
Expand Down
1 change: 0 additions & 1 deletion chat-with-context/src/highlight.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ Website: https://www.w3.org/TR/sparql11-query/, http://www.w3.org/TR/sparql11-up
It's quite horrible highlighting https://github.com/highlightjs/highlightjs-turtle Variables are same colors as prefixed URIs...
There is also https://github.com/redmer/highlightjs-sparql but it is even worse because it imports a ttl language that does not exist, hats off
Only one solution: use YASGUI
*/

export function hljsDefineTurtle(hljs: any) {
Expand Down
2 changes: 0 additions & 2 deletions chat-with-context/src/providers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -210,8 +210,6 @@ async function streamCustomLangGraph(state: ChatState) {
// Combine with any leftover data from the previous iteration
const combined = partialLine + chunkStr;
for (const line of combined.split("\n").filter(line => line.trim() !== "")) {
// if (line === "data: [DONE]") return;
// if (line.startsWith("data: ")) {
// console.log(line)
try {
const json = JSON.parse(line);
Expand Down
15 changes: 9 additions & 6 deletions packages/expasy-agent/src/expasy_agent/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from expasy_agent.config import settings
from expasy_agent.graph import graph

llm_model = "gpt-4o"
# llm_model = "gpt-4o"
# llm_model = "azure_ai/mistral-large"
# llm_model: str = "gpt-4o-mini"

Expand Down Expand Up @@ -60,7 +60,7 @@ class Message(BaseModel):

class ChatCompletionRequest(BaseModel):
messages: list[Message]
model: Optional[str] = llm_model
model: Optional[str] = "gpt-4o"
max_tokens: Optional[int] = 512
temperature: Optional[float] = 0.0
stream: Optional[bool] = False
Expand All @@ -69,7 +69,11 @@ class ChatCompletionRequest(BaseModel):


def convert_chunk_to_dict(obj: Any) -> Any:
"""Convert a langgraph chunk object to a dict."""
"""Recursively convert a langgraph chunk object to a dict.
Required because LangGraph objects are not serializable by default.
And they use a mix of tuples, dataclasses (BaseMessage) and pydantic BaseModel (BaseMessage).
"""
# {'retrieve': {'retrieved_docs': [Document(metadata={'endpoint_url':
# When sending a msg LangGraph sends a tuple with the message and the metadata
if isinstance(obj, tuple):
Expand All @@ -91,7 +95,6 @@ def convert_chunk_to_dict(obj: Any) -> Any:

async def stream_response(inputs: dict[str, list]):
"""Stream the response from the assistant."""
# messages-tuple
async for event, chunk in graph.astream(inputs, stream_mode=["messages", "updates"]):
# print(event)
chunk_dict = convert_chunk_to_dict({
Expand Down Expand Up @@ -121,15 +124,15 @@ def stream_dict(d: dict) -> str:

# @app.post("/chat/completions")
@app.post("/chat")
async def chat(request: Request):
async def chat(request: ChatCompletionRequest):
"""Chat with the assistant main endpoint."""
auth_header = request.headers.get("Authorization")
if settings.expasy_api_key and (not auth_header or not auth_header.startswith("Bearer ")):
raise ValueError("Missing or invalid Authorization header")
if settings.expasy_api_key and auth_header.split(" ")[1] != settings.expasy_api_key:
raise ValueError("Invalid API key")

request = ChatCompletionRequest(**await request.json())
# request = ChatCompletionRequest(**await request.json())
# request.messages = [msg for msg in request.messages if msg.role != "system"]
# request.messages = [Message(role="system", content=settings.system_prompt), *request.messages]

Expand Down

0 comments on commit d13eacd

Please sign in to comment.