From bb844ce84f26f08205e2e215839a8570b6155abe Mon Sep 17 00:00:00 2001
From: Takala
Date: Wed, 25 Dec 2024 20:54:53 +0800
Subject: [PATCH 1/4] feat: enhance summarizeGroupOpinions to return keyword importance and update prompt for clarity

---
 src/lib/server/llm.ts    | 13 ++++++++-----
 src/lib/server/prompt.ts |  1 +
 2 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/src/lib/server/llm.ts b/src/lib/server/llm.ts
index 60f7064..7d972d9 100644
--- a/src/lib/server/llm.ts
+++ b/src/lib/server/llm.ts
@@ -394,9 +394,12 @@ export async function summarizeConcepts(
 	}
 }
 
-export async function summarizeGroupOpinions(
-	student_opinion: StudentSpeak[]
-): Promise<{ success: boolean; summary: string; keywords: string[]; error?: string }> {
+export async function summarizeGroupOpinions(student_opinion: StudentSpeak[]): Promise<{
+	success: boolean;
+	summary: string;
+	keywords: Record<string, number>;
+	error?: string;
+}> {
 	try {
 		const formatted_opinions = student_opinion
 			.filter((opinion) => opinion.role !== '摘要小幫手')
@@ -409,7 +412,7 @@ export async function summarizeGroupOpinions(
 		);
 		const summary_group_opinion_schema = z.object({
 			group_summary: z.string(),
-			group_key_points: z.array(z.string())
+			group_key_points: z.record(z.string(), z.number().min(1).max(5))
 		});
 
 		const response = await requestZodLLM(system_prompt, summary_group_opinion_schema);
@@ -430,7 +433,7 @@ export async function summarizeGroupOpinions(
 		return {
 			success: false,
 			summary: '',
-			keywords: [],
+			keywords: {},
 			error: 'Failed to summarize group opinions'
 		};
 	}
diff --git a/src/lib/server/prompt.ts b/src/lib/server/prompt.ts
index 493ddb2..9a52363 100644
--- a/src/lib/server/prompt.ts
+++ b/src/lib/server/prompt.ts
@@ -58,6 +58,7 @@ export const CONCEPT_SUMMARY_PROMPT = `
 
 export const GROUP_OPINION_SUMMARY_PROMPT = `
 請總結以下學生們的小組觀點、想法和結論:
+學生的關鍵字請以單詞的方式提供,並給予每個單詞在討論中的重要度(1-5),數字越大代表詞彙越重要。
 
 {groupOpinions}
 
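
Note: after this patch, summarizeGroupOpinions returns keywords as a map from keyword to its 1-5 importance score instead of a flat string array. A minimal consumer sketch, assuming a caller that only needs the keywords ranked by weight (the helper below is hypothetical and not part of this series):

// Hypothetical consumer of the new return shape; the typing mirrors the patch above.
type GroupSummaryResult = {
	success: boolean;
	summary: string;
	keywords: Record<string, number>; // keyword -> importance score (1-5)
	error?: string;
};

function rankKeywords(result: GroupSummaryResult): [string, number][] {
	// Highest-weighted keywords first, e.g. for a word cloud or a ranked list.
	return Object.entries(result.keywords).sort(([, a], [, b]) => b - a);
}
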
From 98f92e32523226819a4c64f72b67a7a0f66f3f41 Mon Sep 17 00:00:00 2001
From: Takala
Date: Wed, 25 Dec 2024 21:05:31 +0800
Subject: [PATCH 2/4] fix: update prompt guidelines for clarity and student engagement

---
 src/lib/server/prompt.ts | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/lib/server/prompt.ts b/src/lib/server/prompt.ts
index 9a52363..43636d8 100644
--- a/src/lib/server/prompt.ts
+++ b/src/lib/server/prompt.ts
@@ -5,8 +5,9 @@ export const DOCS_CONTEXT_SYSTEM_PROMPT = `
 3. 根據提供的文件引導學生達到次要目標
 4. 確保回答與原始資料保持一致且準確
 5. 不能直接主觀的給出答案,要引導學生自己思考
-6. 如果學生回答錯誤,要引導學生找到錯誤的原因並修正
-7. 永遠不要否定學生的答案,但不能偏離文件希望達到的目標
+6. 請不要直接說出文件中的內容,或是說你是參考文件的內容
+7. 如果學生回答錯誤,要引導學生找到錯誤的原因並修正
+8. 永遠不要否定學生的答案,但不能偏離文件希望達到的目標
 
 主要問題:
 {task}

From b5abcba210cf490a7131fc7c954546b07541b473 Mon Sep 17 00:00:00 2001
From: Takala
Date: Wed, 25 Dec 2024 21:22:44 +0800
Subject: [PATCH 3/4] feat: enhance chatWithLLMByDocs to track subtask completion and update prompt guidelines

---
 src/lib/server/llm.ts                                      | 7 ++++++-
 src/lib/server/prompt.ts                                   | 2 ++
 .../[group_number]/conversations/[conv_id]/chat/+server.ts | 3 ++-
 3 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/src/lib/server/llm.ts b/src/lib/server/llm.ts
index 7d972d9..709993b 100644
--- a/src/lib/server/llm.ts
+++ b/src/lib/server/llm.ts
@@ -179,6 +179,7 @@ export async function chatWithLLMByDocs(
 	history: LLMChatMessage[],
 	task: string,
 	subtasks: string[],
+	subtaskCompleted: boolean[],
 	resources: Resource[],
 	temperature = 0.7
 ): Promise<{
@@ -202,8 +203,12 @@ export async function chatWithLLMByDocs(
 		})
 		.join('\n\n');
 
+	const formattedSubtasks = subtasks.map((subtask, index) => {
+		return subtaskCompleted[index] ? `(完成)${subtask}` : `(未完成)${subtask}`;
+	});
+
 	const system_prompt = DOCS_CONTEXT_SYSTEM_PROMPT.replace('{task}', task)
-		.replace('{subtasks}', subtasks.join('\n'))
+		.replace('{subtasks}', formattedSubtasks.join('\n'))
 		.replace('{resources}', formatted_docs);
 
 	const [response, subtask_completed, moderation, off_topic] = await Promise.all([
diff --git a/src/lib/server/prompt.ts b/src/lib/server/prompt.ts
index 43636d8..2732f62 100644
--- a/src/lib/server/prompt.ts
+++ b/src/lib/server/prompt.ts
@@ -8,6 +8,8 @@ export const DOCS_CONTEXT_SYSTEM_PROMPT = `
 6. 請不要直接說出文件中的內容,或是說你是參考文件的內容
 7. 如果學生回答錯誤,要引導學生找到錯誤的原因並修正
 8. 永遠不要否定學生的答案,但不能偏離文件希望達到的目標
+9. 引導時先以未完成的次要目標為主
+10. 如果次要目標都達成了,請繼續跟學生對話,加深學生對於主要問題的理解
 
 主要問題:
 {task}
diff --git a/src/routes/api/session/[id]/group/[group_number]/conversations/[conv_id]/chat/+server.ts b/src/routes/api/session/[id]/group/[group_number]/conversations/[conv_id]/chat/+server.ts
index 34f16e9..9f04bdf 100644
--- a/src/routes/api/session/[id]/group/[group_number]/conversations/[conv_id]/chat/+server.ts
+++ b/src/routes/api/session/[id]/group/[group_number]/conversations/[conv_id]/chat/+server.ts
@@ -26,7 +26,7 @@ export const POST: RequestHandler = async ({ request, params, locals }) => {
 		throw error(400, 'Missing parameters');
 	}
 
-	const conversation_ref = await getConversationRef(id, group_number, conv_id);
+	const conversation_ref = getConversationRef(id, group_number, conv_id);
 	console.log('Retrieved conversation reference');
 	const { userId, task, subtasks, resources, history, warning, subtaskCompleted } =
 		await getConversationData(conversation_ref);
@@ -58,6 +58,7 @@ export const POST: RequestHandler = async ({ request, params, locals }) => {
 		[...chat_history, { role: 'user', content: content }],
 		task,
 		subtasks,
+		subtaskCompleted,
 		resources
 	);
 	console.log('Received LLM response', {
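
Note: chatWithLLMByDocs now renders each subtask's completion state directly into the system prompt, which is what guideline 9 above (prioritize the unfinished subtasks first) keys off. A minimal sketch of the formatting step, using invented subtask strings:

// Sketch of the subtask formatting added in chatWithLLMByDocs (sample data is invented).
const subtasks = ['定義光合作用', '舉出一個日常生活的例子'];
const subtaskCompleted = [true, false];

const formattedSubtasks = subtasks.map((subtask, index) =>
	subtaskCompleted[index] ? `(完成)${subtask}` : `(未完成)${subtask}`
);

console.log(formattedSubtasks.join('\n'));
// (完成)定義光合作用
// (未完成)舉出一個日常生活的例子
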
From 9ef5814b0ea69cbef23e0db457e1a7a32851b456 Mon Sep 17 00:00:00 2001
From: Takala
Date: Wed, 25 Dec 2024 21:40:06 +0800
Subject: [PATCH 4/4] feat: update summarizeGroupOpinions to return keyword strength and modify chatWithLLMByDocs to track subtask completion

---
 src/lib/server/llm.ts                             | 13 +++++++++++--
 .../group/[group_number]/conversations/+server.ts |  8 +++++++-
 2 files changed, 18 insertions(+), 3 deletions(-)

diff --git a/src/lib/server/llm.ts b/src/lib/server/llm.ts
index 709993b..396b7ef 100644
--- a/src/lib/server/llm.ts
+++ b/src/lib/server/llm.ts
@@ -417,7 +417,12 @@ export async function summarizeGroupOpinions(student_opinion: StudentSpeak[]): P
 		);
 		const summary_group_opinion_schema = z.object({
 			group_summary: z.string(),
-			group_key_points: z.record(z.string(), z.number().min(1).max(5))
+			group_keywords: z.array(
+				z.object({
+					keyword: z.string(),
+					strength: z.number()
+				})
+			)
 		});
 
 		const response = await requestZodLLM(system_prompt, summary_group_opinion_schema);
@@ -427,7 +432,7 @@ export async function summarizeGroupOpinions(student_opinion: StudentSpeak[]): P
 		}
 		const message = response.message as z.infer<typeof summary_group_opinion_schema>;
+		const formatted_keywords = message.group_keywords.reduce(
+			(acc, keyword) => ({ ...acc, [keyword.keyword]: keyword.strength }),
+			{} as Record<string, number>
+		);
 
 		return {
 			success: true,
 			summary: message.group_summary,
-			keywords: message.group_key_points
+			keywords: formatted_keywords
 		};
 	} catch (error) {
 		console.error('Error in summarizeGroupOpinions:', error);
diff --git a/src/routes/api/session/[id]/group/[group_number]/conversations/+server.ts b/src/routes/api/session/[id]/group/[group_number]/conversations/+server.ts
index 7119f8a..6786c0d 100644
--- a/src/routes/api/session/[id]/group/[group_number]/conversations/+server.ts
+++ b/src/routes/api/session/[id]/group/[group_number]/conversations/+server.ts
@@ -25,7 +25,13 @@ export const GET: RequestHandler = async ({ params, locals }) => {
 	const group_ref = getGroupRef(id, group_number);
 	const group_data = await getGroupData(group_ref);
 
-	const intro = await chatWithLLMByDocs([], task, subtasks, resources);
+	const intro = await chatWithLLMByDocs(
+		[],
+		task,
+		subtasks,
+		new Array(subtasks.length).fill(false),
+		resources
+	);
 	if (!intro) {
 		throw error(500, 'Error generating intro message');
 	}
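
Note: the schema now asks the model for an array of { keyword, strength } objects, and the reduce collapses that array back into the Record<string, number> shape the function already returns. A standalone sketch of that conversion, with invented sample keywords:

// Standalone sketch of the array-to-record conversion used above (sample data is invented).
type GroupKeyword = { keyword: string; strength: number };

const group_keywords: GroupKeyword[] = [
	{ keyword: '光合作用', strength: 5 },
	{ keyword: '葉綠素', strength: 3 }
];

const formatted_keywords = group_keywords.reduce(
	(acc, keyword) => ({ ...acc, [keyword.keyword]: keyword.strength }),
	{} as Record<string, number>
);

console.log(formatted_keywords); // { '光合作用': 5, '葉綠素': 3 }

The intro call in conversations/+server.ts seeds the new subtaskCompleted parameter with new Array(subtasks.length).fill(false), since no subtask can be complete before the conversation starts.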