
Commit 929523e

Hany-Almnaem, bajtos, and juliangruber authored
feat: add client-side max request duration timeout to Spark (#99) (#124)
* feat: add client-side max request duration timeout to Spark (#99)

* refactor(fetchCAR): cleanup based on review feedback
  - Moved MAX_REQUEST_DURATION_MS to constants
  - Fixed indentation
  - Removed maxTimeout field in favor of unified timeout

* Refactor fetchCAR timeouts for clarity and testability

* feat: add jitter to delay between tasks (#125)

  Introduce random jitter in the delay between subsequent tasks (retrieval checks).
  The goal is to better spread out the requests from the network over time, to avoid
  too many nodes hitting services like cid.contact at the same time.

  Signed-off-by: Miroslav Bajtoš <oss@bajtos.net>
  Co-authored-by: Julian Gruber <julian@juliangruber.com>

* v1.18.2

* chore: rename maxFetchDurationMs to maxRequestDurationMs for consistency

* feat: add client-side max request duration timeout to Spark (#99)

* clean up

* fix indentation

* clean up

---------

Signed-off-by: Miroslav Bajtoš <oss@bajtos.net>
Co-authored-by: Miroslav Bajtoš <oss@bajtos.net>
Co-authored-by: Julian Gruber <julian@juliangruber.com>
1 parent 431a211 commit 929523e
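Note: the jitter feature folded into this squash (#125) does not appear in the diff below, which covers only the max-request-duration timeout. A minimal sketch of the approach the commit message describes, assuming the existing constants APPROX_ROUND_LENGTH_IN_MS and MAX_JITTER_BETWEEN_TASKS_IN_MS and the calculateDelayBeforeNextTask helper referenced by the tests; the parameter names here are illustrative, not taken from this commit:

// Illustrative sketch only; not the code changed in this commit.
// Spread retrieval checks over the round and add up to MAX_JITTER_BETWEEN_TASKS_IN_MS
// (10 s) of random delay, so nodes don't all hit services like cid.contact at once.
import { APPROX_ROUND_LENGTH_IN_MS, MAX_JITTER_BETWEEN_TASKS_IN_MS } from './constants.js'

export function calculateDelayBeforeNextTask ({
  roundLengthInMs = APPROX_ROUND_LENGTH_IN_MS, // hypothetical parameter names
  maxTasksPerRound = 1
} = {}) {
  // Baseline: distribute the round's tasks evenly across the round duration.
  const baseDelay = roundLengthInMs / Math.max(maxTasksPerRound, 1)
  // Jitter: add a random extra delay between 0 and MAX_JITTER_BETWEEN_TASKS_IN_MS.
  const jitter = Math.random() * MAX_JITTER_BETWEEN_TASKS_IN_MS
  return baseDelay + jitter
}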

3 files changed: +44 / -7 lines


lib/constants.js

+1
@@ -5,3 +5,4 @@ export const MAX_JITTER_BETWEEN_TASKS_IN_MS = 10_000 // 10 seconds
 export const RPC_URL = 'https://api.node.glif.io/'
 export const RPC_AUTH = 'KZLIUb9ejreYOm-mZFM3UNADE0ux6CrHjxnS2D2Qgb8='
 export const MINER_TO_PEERID_CONTRACT_ADDRESS = '0x14183aD016Ddc83D638425D6328009aa390339Ce' // Contract address on the Filecoin EVM
+export const MAX_REQUEST_DURATION_MS = 90_000

lib/spark.js

+15 / -7
@@ -1,7 +1,7 @@
 /* global Zinnia */

 import { ActivityState } from './activity-state.js'
-import { SPARK_VERSION, MAX_CAR_SIZE, APPROX_ROUND_LENGTH_IN_MS, MAX_JITTER_BETWEEN_TASKS_IN_MS } from './constants.js'
+import { SPARK_VERSION, MAX_CAR_SIZE, APPROX_ROUND_LENGTH_IN_MS, MAX_JITTER_BETWEEN_TASKS_IN_MS, MAX_REQUEST_DURATION_MS } from './constants.js'
 import { queryTheIndex } from './ipni-client.js'
 import { assertOkResponse } from './http-assertions.js'
 import { getIndexProviderPeerId as defaultGetIndexProvider } from './miner-info.js'
@@ -83,20 +83,27 @@ export default class Spark {
     }
   }

-  async fetchCAR (protocol, address, cid, stats) {
+  async fetchCAR (protocol, address, cid, stats, { maxRequestDurationMs = MAX_REQUEST_DURATION_MS } = {}) {
     // Abort if no progress was made for 60 seconds
     const controller = new AbortController()
     const { signal } = controller
-    let timeout
+
+    let requestIdleTimeout
+    let maxDurationTimeout
+
     const resetTimeout = () => {
-      if (timeout) {
-        clearTimeout(timeout)
+      if (requestIdleTimeout) {
+        clearTimeout(requestIdleTimeout)
       }
-      timeout = setTimeout(() => {
+      requestIdleTimeout = setTimeout(() => {
         stats.timeout = true
         controller.abort()
       }, 60_000)
     }
+    maxDurationTimeout = setTimeout(() => {
+      stats.timeout = true
+      controller.abort()
+    }, maxRequestDurationMs)

     // WebCrypto API does not support streams yet, the hashing function requires entire data
     // to be provided at once. See https://github.com/w3c/webcrypto/issues/73
@@ -157,7 +164,8 @@ export default class Spark {
         stats.statusCode = mapErrorToStatusCode(err)
       }
     } finally {
-      clearTimeout(timeout)
+      clearTimeout(requestIdleTimeout)
+      clearTimeout(maxDurationTimeout)
     }

     stats.endAt = new Date()
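The net effect of the change above: fetchCAR now runs two independent timers against the same AbortController. The idle timer is re-armed whenever progress is made, while the max-duration timer is armed once and never reset. A standalone sketch of the same pattern, with hypothetical names (fetchWithTimeouts is not part of Spark):

// Standalone illustration of the two-timer pattern; not code from this commit.
async function fetchWithTimeouts (url, { maxRequestDurationMs = 90_000 } = {}) {
  const controller = new AbortController()
  let idleTimeout

  // Re-armed on every chunk: aborts after 60 s without progress.
  const resetIdleTimeout = () => {
    clearTimeout(idleTimeout)
    idleTimeout = setTimeout(() => controller.abort(), 60_000)
  }

  // Armed once, never reset: aborts when the overall request budget is exhausted.
  const maxDurationTimeout = setTimeout(() => controller.abort(), maxRequestDurationMs)

  try {
    resetIdleTimeout()
    const res = await fetch(url, { signal: controller.signal })
    const reader = res.body.getReader()
    while (true) {
      const { done, value } = await reader.read()
      if (done) break
      resetIdleTimeout() // progress was made, push the idle deadline out
      // ...process `value` here...
    }
    return res
  } finally {
    clearTimeout(idleTimeout)
    clearTimeout(maxDurationTimeout)
  }
}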

test/spark.js

+28
@@ -420,3 +420,31 @@ test('calculateDelayBeforeNextTask() introduces random jitter for zero tasks in
     `expected delay values to be within 1 second of each other. Actual values: ${delay1} <> ${delay2}`
   )
 })
+
+test('fetchCAR triggers timeout after long retrieval', async () => {
+  const sleep = (ms) => new Promise(resolve => setTimeout(resolve, ms))
+  const fetch = async (_url, { signal }) => {
+    return {
+      status: 200,
+      ok: true,
+      body: (async function * () {
+        while (true) {
+          if (signal.aborted) {
+            throw new DOMException('Aborted', 'AbortError')
+          }
+          yield new Uint8Array([0])
+          await sleep(500)
+        }
+      })()
+    }
+  }
+
+  const spark = new Spark({ fetch })
+  const stats = newStats()
+
+  await spark.fetchCAR('http', '/dns/example.com/tcp/80/http', KNOWN_CID, stats, {
+    maxRequestDurationMs: 0
+  })
+
+  assertEquals(stats.timeout, true)
+})
