Blob is not a Transferable object. Also, there is no .readAsSharedArrayBuffer method available on FileReader.
However, if you only need to read a Blob from multiple workers simultaneously, I believe you can achieve this with URL.createObjectURL() and fetch, although I have not tested this with multiple workers:
// === main thread ===
// One object URL can be handed to any number of workers; each fetches
// its own read-only view of the underlying Blob.
const blobUrl = URL.createObjectURL(blob);
for (const w of [worker1, worker2]) {
  w.postMessage(blobUrl);
}
// === worker 1 & 2 ===
// Resolve the object URL received from the main thread back into a Blob.
self.onmessage = async (msg) => {
  const res = await fetch(msg.data);
  const blob = await res.blob();
  doSomethingWithBlob(blob);
};
Otherwise, as far as I can tell, there really isn't an efficient way to load data from a file into a SharedArrayBuffer.
I also provide a method here for transferring chunks of a blob from main thread to a single worker. For my use case, the files are too big to read the entire contents into a single array buffer anyway (shared or not), so I use .slice to deal in chunks. Something like this will let you deliver tons of data to a single worker in a stream-like fashion via multiple .postMessage calls using the Transferable ArrayBuffer:
// === main thread ===
// Streams `file` to `worker` in chunkSize-byte slices, keeping at most one
// extra chunk buffered so the main thread's heap doesn't fill up.
let eof = false;
let nextBuffer = null;
let workerReady = true;
let read = 0;
// Read the next slice of the file; sets eof when the final slice is reached.
function nextChunk() {
  let end = read + chunkSize;
  // Blob/File exposes its byte length as .size (not .length).
  if(end >= file.size) {
    end = file.size;
    eof = true;
  }
  let slice = file.slice(read, end);
  read = end;
  fr.readAsArrayBuffer(slice);
}
fr.onload = event => {
  let ab = event.target.result;
  if(workerReady) {
    // Worker is idle: hand the chunk over immediately. Listing `ab` in the
    // transfer list moves it (zero-copy) instead of cloning it.
    worker.postMessage(ab, [ab]);
    workerReady = false;
    if(!eof) nextChunk();
  }
  else {
    // Worker is still busy: park this chunk until it reports ready.
    nextBuffer = ab;
  }
};
// wait until the worker finished the last chunk
// ... otherwise we'll flood main thread's heap
worker.onmessage = msg => {
  if(nextBuffer) {
    worker.postMessage(nextBuffer, [nextBuffer]);
    nextBuffer = null;
    // Keep the pipeline full: start reading the chunk after the one we
    // just handed over (without this, transfers stall after two chunks).
    if(!eof) nextChunk();
  }
  // The worker's payload lives on MessageEvent.data, not on the event itself.
  else if(!eof && msg.data.ready) {
    nextChunk();
  }
};
nextChunk();
// === worker ===
// Receives one transferred ArrayBuffer per message, processes it, then tells
// the main thread it is ready for the next chunk.
self.onmessage = ({ data: ab }) => {
  // ... do stuff with data ...
  self.postMessage({ready:true});
};
This will read a chunk of data into an ArrayBuffer in the main thread, transfer it to the worker, and then read the next chunk into memory while waiting for the worker to process the previous chunk. This keeps both threads busy essentially the whole time.