Hi,
I’m trying to stream a large video file. To do this, and for reasons connected to the remote server, I need to fetch it in chunks of no more than 10MB, rather than fetching the whole file in one request.
I’m using a version of the following code, which gets me most of the way there. However, fetching a large file means I am sending more fetch requests in parallel than the CF limit (6) allows: I need to batch them into batches of no more than 6 parallel requests.
/**
* Make multiple requests,
* aggregate the responses and
* stream it back as a single response.
*/
async function fetchAndApply(request) {
const fetches = [
"https://www.myvideofile.xyz/file.mp4?part=1",
"https://www.myvideofile.xyz/file.mp4?part=2",
"https://www.myvideofile.xyz/file.mp4?part=3",
"https://www.myvideofile.xyz/file.mp4?part=.......",
].map(url => fetch(url))
// Create a pipe and stream the response bodies
let { readable, writable } = new TransformStream()
streamResponseBodies(fetches, writable)
return new Response(readable)
}
async function streamResponseBodies(fetches, writable) {
let writer = writable.getWriter()
for (let i = 0; i < fetches.length; ++i) {
let response = await fetches[i]
await manualPipeTo(response.body.getReader(), writer)
}
await writer.close()
}
async function manualPipeTo(reader, writer) {
while (true) {
const { done, value } = await reader.read()
if (done) break
await writer.write(value)
}
}
I’ve tried using loops but no luck so far. Can anybody help me to adapt this to batch the requests into groups of no more than 6?
Thanks.