Commit 611b101

Refactoring to reduce code duplication

1 parent 34a05d4 commit 611b101

1 file changed: mikupad.html, +63 -122 lines
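The same refactoring is applied to each backend completion generator below: the per-token processing loop, previously duplicated across the streaming and non-streaming branches, is hoisted into a local async generator `yieldTokens(chunks)`, and both branches delegate to it with `yield*`. A minimal sketch of the pattern, with a hypothetical `process` helper standing in for the real per-chunk logic:

    // Before: the same per-chunk logic appears in both branches.
    async function* completion(res, options) {
        if (options.stream) {
            for await (const chunk of parseEventStream(res.body)) {
                yield process(chunk); // duplicated (process is hypothetical)
            }
        } else {
            const { chunks } = await res.json();
            for (const chunk of chunks) {
                yield process(chunk); // duplicated (process is hypothetical)
            }
        }
    }

    // After: one local helper serves both sources. `for await...of`
    // also accepts plain (sync) iterables, so the non-streaming
    // branch can pass an ordinary array.
    async function* completion(res, options) {
        async function* yieldTokens(chunks) {
            for await (const chunk of chunks) {
                yield process(chunk);
            }
        }

        if (options.stream) {
            yield* yieldTokens(parseEventStream(res.body));
        } else {
            const { chunks } = await res.json();
            yield* yieldTokens(chunks);
        }
    }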
mikupad.html
@@ -1734,48 +1734,21 @@
         }),
         signal,
     });
-    if (!res.ok)
+
+    if (!res.ok) {
         throw new Error(`HTTP ${res.status}`);
-    if (options.stream) {
-        for await (const chunk of parseEventStream(res.body)) {
+    }
+
+    async function* yieldTokens(chunks) {
+        for await (const chunk of chunks) {
+            const token = chunk.content || chunk.token;
             const choice = chunk.completion_probabilities?.[0];
 
-            let probs = [];
-            if (choice?.probs) {
-                probs = choice.probs ?? [];
-            } else if (choice?.top_logprobs) {
-                probs = Object.values(choice.top_logprobs).map(({ token, logprob }) => ({
-                    tok_str: token,
-                    prob: Math.exp(logprob)
-                }));
-            }
-            const prob = probs.find(p => p.tok_str === chunk.content)?.prob;
-
-            yield {
-                content: chunk.content,
-                ...(probs.length > 0 ? {
-                    prob: prob ?? -1,
-                    completion_probabilities: [{
-                        content: chunk.content,
-                        probs
-                    }]
-                } : {})
-            };
-        }
-    } else {
-        const { completion_probabilities } = await res.json();
-        for (const chunk of completion_probabilities) {
-            const token = chunk.content ? chunk.content : chunk.token;
-
-            let probs = [];
-            if (chunk.probs) {
-                probs = chunk.probs ?? [];
-            } else if (chunk.top_logprobs) {
-                probs = Object.values(chunk.top_logprobs).map(({ token, logprob }) => ({
+            const probs = choice?.probs ??
+                Object.values(choice?.top_logprobs || chunk.top_logprobs || {}).map(({ token, logprob }) => ({
                     tok_str: token,
                     prob: Math.exp(logprob)
                 }));
-            }
             const prob = probs.find(p => p.tok_str === token)?.prob;
 
             yield {
@@ -1790,6 +1763,13 @@
             };
         }
     }
+
+    if (options.stream) {
+        yield* await yieldTokens(parseEventStream(res.body));
+    } else {
+        const { completion_probabilities } = await res.json();
+        yield* await yieldTokens(completion_probabilities);
+    }
 }
 
 async function koboldCppTokenCount({ endpoint, endpointAPIKey, proxyEndpoint, signal, ...options }) {
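A note on the delegation form used throughout the commit, `yield* await yieldTokens(...)`: calling an async generator function returns an async generator object synchronously, and `await` on a value that is not a thenable simply resolves to that value, so the `await` is a harmless no-op and plain `yield* yieldTokens(...)` would behave identically:

    async function* inner() {
        yield 1;
        yield 2;
    }

    async function* outer() {
        // the generator object is not a thenable, so await passes it through
        yield* await inner();
    }

    (async () => {
        for await (const n of outer()) console.log(n); // 1, 2
    })();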
@@ -1872,19 +1852,16 @@
         }),
         signal,
     });
-    if (!res.ok)
+
+    if (!res.ok) {
         throw new Error(`HTTP ${res.status}`);
-    if (options.stream) {
-        for await (const chunk of parseEventStream(res.body)) {
-            yield { content: chunk.token };
-        }
-    } else {
-        const { results } = await res.json();
-        const chunks = results?.[0].logprobs?.content ?? [];
-        for (const chunk of chunks) {
+    }
+
+    async function* yieldTokens(chunks) {
+        for await (const chunk of chunks) {
             const { token, top_logprobs } = chunk;
-
+
             const probs = Object.values(top_logprobs ?? {}).map(({ token, logprob }) => ({
                 tok_str: token,
                 prob: Math.exp(logprob)
             }));
@@ -1902,6 +1879,13 @@
             };
         }
     }
+
+    if (options.stream) {
+        yield* await yieldTokens(parseEventStream(res.body));
+    } else {
+        const { results } = await res.json();
+        yield* await yieldTokens(results?.[0].logprobs?.content ?? []);
+    }
 }
 
 async function koboldCppAbortCompletion({ endpoint, proxyEndpoint, ...options }) {
@@ -2193,18 +2177,16 @@
         throw new Error(`HTTP ${res.status}`);
     }
 
-    if (options.stream) {
-        for await (const chunk of parseEventStream(res.body)) {
+    async function* yieldTokens(chunks) {
+        for await (const chunk of chunks) {
             if (!chunk.choices || chunk.choices.length === 0) {
-                if (chunk.content) {
-                    yield { content: chunk.content };
-                }
+                if (chunk.content) yield { content: chunk.content };
                 continue;
             }
 
             const { text, logprobs } = chunk.choices[0];
             const top_logprobs = logprobs?.top_logprobs?.[0] ?? {};
-
+
             const probs = Object.entries(top_logprobs).map(([tok, logprob]) => ({
                 tok_str: tok,
                 prob: Math.exp(logprob)
@@ -2222,38 +2204,25 @@
                 } : {})
             };
         }
+    }
+
+    if (options.stream) {
+        yield* await yieldTokens(parseEventStream(res.body));
     } else {
         const { content, choices } = await res.json();
         if (choices?.[0].logprobs?.tokens) {
-            const logprobs = choices?.[0].logprobs;
-            const chunks = Object.values(logprobs.tokens).map((token, i) => {
-                return { text: token, logprobs: { top_logprobs: [ logprobs.top_logprobs[i] ] } };
-            });
-            for (const chunk of chunks) {
-                const { text, logprobs } = chunk;
-                const top_logprobs = logprobs?.top_logprobs?.[0] ?? {};
-
-                const probs = Object.entries(top_logprobs).map(([tok, logprob]) => ({
-                    tok_str: tok,
-                    prob: Math.exp(logprob)
-                }));
-                const prob = probs.find(p => p.tok_str === text)?.prob;
-
-                yield {
-                    content: text,
-                    ...(probs.length > 0 ? {
-                        prob: prob ?? -1,
-                        completion_probabilities: [{
-                            content: text,
-                            probs
-                        }]
-                    } : {})
-                };
-            }
+            const logprobs = choices[0].logprobs;
+            const chunks = Object.values(logprobs.tokens).map((token, i) => ({
+                choices: [{
+                    text: token,
+                    logprobs: { top_logprobs: [logprobs.top_logprobs[i]] }
+                }]
+            }));
+            yield* await yieldTokens(chunks);
         } else if (choices?.[0].text) {
             yield { content: choices[0].text };
         } else if (content) { // llama.cpp specific?
-            yield { content: content };
+            yield { content };
         }
     }
 }
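In the non-streaming branch above, a legacy completions response carries `logprobs.tokens` and `logprobs.top_logprobs` as parallel arrays, so the new code re-wraps each token into the chunk shape the streaming path produces before delegating to `yieldTokens`. Roughly, with hypothetical response values:

    const logprobs = {
        tokens: ['He', 'llo'],
        top_logprobs: [{ 'He': -0.10 }, { 'llo': -0.25 }]
    };

    const chunks = Object.values(logprobs.tokens).map((token, i) => ({
        choices: [{
            text: token,
            logprobs: { top_logprobs: [logprobs.top_logprobs[i]] }
        }]
    }));
    // chunks[0] -> { choices: [{ text: 'He', logprobs: { top_logprobs: [{ 'He': -0.10 }] } }] }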
@@ -2300,7 +2269,6 @@
     }
 }
 
-
 async function* openaiChatCompletion({ endpoint, endpointAPIKey, proxyEndpoint, signal, ...options }) {
     const res = await fetch(`${proxyEndpoint ?? endpoint}/v1/chat/completions`, {
         method: 'POST',
@@ -2326,14 +2294,12 @@
         throw new Error(`HTTP ${res.status}`);
     }
 
-    if (options.stream) {
-        for await (const chunk of parseEventStream(res.body)) {
+    async function* yieldTokens(chunks) {
+        for await (const chunk of chunks) {
             const token = chunk.choices[0].delta.content;
             const top_logprobs = chunk.choices[0].logprobs?.content?.[0]?.top_logprobs ?? {};
-            if (!token) {
-                continue
-            }
-
+            if (!token) continue;
+
             const probs = Object.values(top_logprobs).map(({ token, logprob }) => ({
                 tok_str: token,
                 prob: Math.exp(logprob)
@@ -2351,49 +2317,24 @@
                 } : {})
             };
         }
+    }
+
+    if (options.stream) {
+        yield* await yieldTokens(parseEventStream(res.body));
     } else {
         const { choices } = await res.json();
-        const chunks = choices?.[0].logprobs?.content ?? [];
-        if (chunks.length) {
+        const chunks = choices?.[0].logprobs?.content;
+
+        if (chunks?.length) {
             const formattedChunks = chunks.map(chunk => ({
                 choices: [{
                     delta: { content: chunk.token },
-                    logprobs: {
-                        content: [{
-                            top_logprobs: chunk.top_logprobs
-                        }]
-                    }
+                    logprobs: { content: [{ top_logprobs: chunk.top_logprobs }] }
                 }]
             }));
-            for await (const chunk of openaiBufferUtf8Stream(formattedChunks)) {
-                const token = chunk.choices[0].delta.content;
-                const top_logprobs = chunk.choices[0].logprobs?.content?.[0]?.top_logprobs ?? {};
-                if (!token) {
-                    continue
-                }
-
-                const probs = Object.values(top_logprobs).map(({ token, logprob }) => ({
-                    tok_str: token,
-                    prob: Math.exp(logprob)
-                }));
-                const prob = probs.find(p => p.tok_str === token)?.prob;
-
-                yield {
-                    content: token,
-                    ...(probs.length > 0 ? {
-                        prob: prob ?? -1,
-                        completion_probabilities: [{
-                            content: token,
-                            probs
-                        }]
-                    } : {})
-                };
-            }
-        } else {
-            const content = choices?.[0].message?.content;
-            if (content) {
-                yield { content: content };
-            }
+            yield* await yieldTokens(openaiBufferUtf8Stream(formattedChunks));
+        } else if (choices?.[0].message?.content) {
+            yield { content: choices[0].message.content };
         }
     }
 }
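After the refactor, every backend generator yields chunks of the same shape: `{ content }`, extended with `prob` and `completion_probabilities` when token probabilities are available. A caller might consume them like this (hypothetical options object, using the `openaiChatCompletion` signature from the diff):

    (async () => {
        const options = { stream: true }; // plus whatever sampling settings apply
        let text = '';
        for await (const chunk of openaiChatCompletion({
            endpoint: 'http://127.0.0.1:8080', // hypothetical server
            endpointAPIKey: '',
            signal: new AbortController().signal,
            ...options
        })) {
            text += chunk.content;
            if (chunk.completion_probabilities) {
                // chunk.completion_probabilities[0].probs lists the
                // { tok_str, prob } alternatives for this token.
            }
        }
    })();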
