Skip to content

Commit 34a05d4

Browse files
committed
Fix text decoding in OpenAI non-streaming responses
1 parent 47f7190 commit 34a05d4

File tree

1 file changed

+56
-2
lines changed

1 file changed

+56
-2
lines changed

mikupad.html

+56-2
Original file line numberDiff line numberDiff line change
@@ -2258,6 +2258,49 @@
22582258
}
22592259
}
22602260

2261+
// Re-decodes token text coming from an OpenAI-style stream so that multi-byte
// UTF-8 characters split across chunks are reassembled correctly.
// Some backends emit raw bytes as literal "\xNN" escape sequences; this
// converts those (and any surrounding literal text) back to bytes, then feeds
// the bytes through a streaming TextDecoder that buffers incomplete sequences
// between chunks.
//
// @param {AsyncIterable|Iterable} stream - chunks shaped like OpenAI responses:
//   { choices: [{ delta: { content } }] } or { choices: [{ text }] }.
// @yields the same chunk shape with `content`/`text` replaced by decoded text;
//   chunks with no content are passed through untouched.
async function* openaiBufferUtf8Stream(stream) {
	// fatal: false -> invalid sequences become U+FFFD instead of throwing.
	const decoder = new TextDecoder('utf-8', { fatal: false });
	const encoder = new TextEncoder();
	// Matches one literal backslash-x escape carrying exactly two hex digits.
	const escapePattern = /\\x([0-9a-fA-F]{2})/;

	// Convert a string that may mix literal text with "\xNN" escapes into the
	// byte sequence it represents. Literal runs are UTF-8 encoded; each escape
	// contributes exactly one byte. (The previous split('\\x')-based approach
	// dropped literal text before the first escape and mis-parsed segments
	// with trailing characters after the hex pair.)
	function toBytes(str) {
		const bytes = [];
		let rest = str;
		while (rest.length > 0) {
			const match = escapePattern.exec(rest);
			if (!match) {
				bytes.push(...encoder.encode(rest));
				break;
			}
			if (match.index > 0) {
				bytes.push(...encoder.encode(rest.slice(0, match.index)));
			}
			bytes.push(parseInt(match[1], 16));
			rest = rest.slice(match.index + match[0].length);
		}
		return new Uint8Array(bytes);
	}

	for await (const chunk of stream) {
		// Chat completions carry text in delta.content; legacy completions in text.
		const content = chunk?.choices?.[0]?.delta?.content ?? chunk?.choices?.[0]?.text;

		if (!content) {
			yield chunk;
			continue;
		}

		// stream: true buffers a trailing incomplete UTF-8 sequence until the
		// next chunk supplies the remaining bytes.
		const decoded = decoder.decode(toBytes(content), { stream: true });

		yield {
			...chunk,
			choices: [{
				...chunk.choices[0],
				...(chunk.choices[0].delta
					? { delta: { ...chunk.choices[0].delta, content: decoded } }
					: { text: decoded }
				)
			}]
		};
	}
}
2302+
2303+
22612304
async function* openaiChatCompletion({ endpoint, endpointAPIKey, proxyEndpoint, signal, ...options }) {
22622305
const res = await fetch(`${proxyEndpoint ?? endpoint}/v1/chat/completions`, {
22632306
method: 'POST',
@@ -2312,8 +2355,19 @@
23122355
const { choices } = await res.json();
23132356
const chunks = choices?.[0].logprobs?.content ?? [];
23142357
if (chunks.length) {
2315-
for (const chunk of chunks) {
2316-
const { token, top_logprobs } = chunk;
2358+
const formattedChunks = chunks.map(chunk => ({
2359+
choices: [{
2360+
delta: { content: chunk.token },
2361+
logprobs: {
2362+
content: [{
2363+
top_logprobs: chunk.top_logprobs
2364+
}]
2365+
}
2366+
}]
2367+
}));
2368+
for await (const chunk of openaiBufferUtf8Stream(formattedChunks)) {
2369+
const token = chunk.choices[0].delta.content;
2370+
const top_logprobs = chunk.choices[0].logprobs?.content?.[0]?.top_logprobs ?? {};
23172371
if (!token) {
23182372
continue
23192373
}

0 commit comments

Comments
 (0)