@@ -1,6 +1,11 @@
 import { cleanContent } from '../helpers';
 import { logger } from '@cardstack/runtime-common';
-import { MatrixClient, sendError, sendMessage, sendOption } from './matrix';
+import {
+  MatrixClient,
+  sendError,
+  sendMessage,
+  sendCommandMessage,
+} from './matrix';
 
 import * as Sentry from '@sentry/node';
 import { OpenAIError } from 'openai/error';
@@ -9,6 +14,8 @@ import { ISendEventResponse } from 'matrix-js-sdk/lib/matrix';
 import { ChatCompletionMessageToolCall } from 'openai/resources/chat/completions';
 import { FunctionToolCall } from '@cardstack/runtime-common/helpers/ai';
 import { thinkingMessage } from '../constants';
+import { APP_BOXEL_COMMAND_MSGTYPE } from '@cardstack/runtime-common/matrix-constants';
+import type OpenAI from 'openai';
 
 let log = logger('ai-bot');
 
@@ -19,6 +26,8 @@ export class Responder {
   initialMessageReplaced = false;
   client: MatrixClient;
   roomId: string;
+  includesFunctionToolCall = false;
+  latestContent?: string;
   messagePromises: Promise<ISendEventResponse | void>[] = [];
   debouncedMessageSender: (
     content: string,
@@ -35,14 +44,22 @@ export class Responder {
     eventToUpdate: string | undefined,
     isStreamingFinished = false,
   ) => {
+    this.latestContent = content;
+    let dataOverrides: Record<string, string | boolean> = {
+      isStreamingFinished: isStreamingFinished,
+    };
+    if (this.includesFunctionToolCall) {
+      dataOverrides = {
+        ...dataOverrides,
+        msgtype: APP_BOXEL_COMMAND_MSGTYPE,
+      };
+    }
     const messagePromise = sendMessage(
       this.client,
       this.roomId,
       content,
       eventToUpdate,
-      {
-        isStreamingFinished: isStreamingFinished,
-      },
+      dataOverrides,
     );
     this.messagePromises.push(messagePromise);
     await messagePromise;
@@ -63,9 +80,17 @@ export class Responder {
     this.initialMessageId = initialMessage.event_id;
   }
 
-  async onChunk(chunk: {
-    usage?: { prompt_tokens: number; completion_tokens: number };
-  }) {
+  async onChunk(chunk: OpenAI.Chat.Completions.ChatCompletionChunk) {
+    log.debug('onChunk: ', JSON.stringify(chunk, null, 2));
+    if (chunk.choices[0].delta?.tool_calls?.[0]?.function) {
+      if (!this.includesFunctionToolCall) {
+        this.includesFunctionToolCall = true;
+        await this.debouncedMessageSender(
+          this.latestContent || '',
+          this.initialMessageId,
+        );
+      }
+    }
     // This usage value is set *once* and *only once* at the end of the conversation
     // It will be null at all other times.
     if (chunk.usage) {
@@ -76,6 +101,7 @@ export class Responder {
   }
 
   async onContent(snapshot: string) {
+    log.debug('onContent: ', snapshot);
     await this.debouncedMessageSender(
       cleanContent(snapshot),
       this.initialMessageId,
@@ -87,6 +113,7 @@ export class Responder {
     role: string;
     tool_calls?: ChatCompletionMessageToolCall[];
   }) {
+    log.debug('onMessage: ', msg);
     if (msg.role === 'assistant') {
       await this.handleFunctionToolCalls(msg);
     }
@@ -111,14 +138,14 @@ export class Responder {
     for (const toolCall of msg.tool_calls || []) {
       log.debug('[Room Timeline] Function call', toolCall);
       try {
-        let optionPromise = sendOption(
+        let commandMessagePromise = sendCommandMessage(
           this.client,
           this.roomId,
           this.deserializeToolCall(toolCall),
-          this.initialMessageReplaced ? undefined : this.initialMessageId,
+          this.initialMessageId,
         );
-        this.messagePromises.push(optionPromise);
-        await optionPromise;
+        this.messagePromises.push(commandMessagePromise);
+        await commandMessagePromise;
         this.initialMessageReplaced = true;
       } catch (error) {
         Sentry.captureException(error);
@@ -127,7 +154,7 @@ export class Responder {
           this.client,
           this.roomId,
           error,
-          this.initialMessageReplaced ? undefined : this.initialMessageId,
+          this.initialMessageId,
         );
        this.messagePromises.push(errorPromise);
        await errorPromise;
@@ -136,6 +163,7 @@ export class Responder {
   }
 
   async onError(error: OpenAIError | string) {
+    log.debug('onError: ', error);
     Sentry.captureException(error);
     return await sendError(
       this.client,
@@ -146,6 +174,7 @@ export class Responder {
   }
 
   async finalize(finalContent: string | void | null | undefined) {
+    log.debug('finalize: ', finalContent);
     if (finalContent) {
       finalContent = cleanContent(finalContent);
       await this.debouncedMessageSender(
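For context, a minimal sketch of how a streaming loop could drive the hooks changed above. The driver function, client setup, model name, and import path for Responder are assumptions for illustration only and are not part of this diff; only the onChunk/onContent/finalize calls reflect the code shown here.

// Hypothetical driver (not part of this change): feeds OpenAI streaming
// chunks into the Responder hooks modified in the diff above.
import OpenAI from 'openai';
import { Responder } from './responder'; // assumed path

async function streamToResponder(responder: Responder, openai: OpenAI) {
  const stream = await openai.chat.completions.create({
    model: 'gpt-4o', // assumed model name
    messages: [{ role: 'user', content: 'Hello' }],
    stream: true,
  });

  let snapshot = '';
  for await (const chunk of stream) {
    // onChunk inspects the delta; the first tool call flips
    // includesFunctionToolCall, which re-sends the latest content and
    // switches subsequent sends to APP_BOXEL_COMMAND_MSGTYPE.
    await responder.onChunk(chunk);

    snapshot += chunk.choices[0]?.delta?.content ?? '';
    // onContent debounces the accumulated text into the Matrix room.
    await responder.onContent(snapshot);
  }

  // finalize cleans the completed text and sends the final update.
  await responder.finalize(snapshot);
}

A design note on the diff itself: because debouncedMessageSender now records latestContent, the tool-call branch in onChunk can immediately re-send whatever text has already streamed under the command msgtype, rather than waiting for the next content snapshot.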