At the moment, the WebGL backend cannot be used in a web worker because OffscreenCanvas is still experimental. The CPU backend, however, works.
Here is a demonstration that delegates the computation to a web worker:
<head>
  <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@0.14.2/dist/tf.min.js"></script>
  <script>
    const worker_function = () => {
      onmessage = () => {
        console.log('from web worker')
        // tf.js looks for a window global; alias the worker's global scope
        this.window = this
        importScripts('https://cdn.jsdelivr.net/npm/setimmediate@1.0.5/setImmediate.min.js')
        importScripts('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@0.10.3')
        tf.setBackend('cpu')
        const res = tf.zeros([1, 2]).add(tf.ones([1, 2]))
        res.print()
        postMessage({res: res.dataSync(), shape: res.shape})
      };
    }
    if (window != self)
      worker_function();
  </script>
  <script>
    // the worker is built from the stringified function above, via a Blob URL
    const worker = new Worker(URL.createObjectURL(new Blob(["(" + worker_function.toString() + ")()"], { type: 'text/javascript' })));
    worker.postMessage({});
    worker.onmessage = (message) => {
      console.log('from main thread')
      const {data} = message
      tf.tensor(data.res, data.shape).print()
    }
  </script>
</head>
When dealing with tensors, the payload exchanged between the main thread and the web worker can be large. That data can either be cloned or transferred.
With cloning, the web worker keeps its own copy of the data and can continue to work with it; with transferring, ownership of the underlying buffer moves along with the data, so the sender can no longer use it. Transferring is faster, roughly analogous to passing by reference.
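The difference can be seen without tf.js at all. Below is a minimal sketch (the names workerBody, demoWorker and buf are mine, not part of the snippets in this answer) that transfers a large ArrayBuffer and checks its byteLength afterwards; a transferred buffer is detached on the sending side, whereas a cloned one would still be readable:

  const workerBody = () => {
    onmessage = (e) => {
      // the worker now owns the buffer and can read it without any copy
      postMessage({length: new Float32Array(e.data.buf).length})
    };
  }
  const demoWorker = new Worker(URL.createObjectURL(new Blob(["(" + workerBody.toString() + ")()"], { type: 'text/javascript' })));

  const buf = new Float32Array(2000 * 2000 * 3).buffer
  console.log(buf.byteLength)            // 48000000
  demoWorker.postMessage({buf}, [buf])   // the second argument is the transfer list
  console.log(buf.byteLength)            // 0: the buffer has been detached (transferred)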
Let's compare the performance of the two approaches with the following snippets:
<head>
  <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@0.14.2/dist/tf.min.js"></script>
  <script>
    const worker_function = () => {
      onmessage = () => {
        console.log('from web worker')
        this.window = this
        importScripts('https://cdn.jsdelivr.net/npm/setimmediate@1.0.5/setImmediate.min.js')
        importScripts('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@0.10.3')
        tf.setBackend('cpu')
        const res = tf.randomNormal([2000, 2000, 3])
        const t0 = performance.now()
        // read the data once and list its buffer as transferable: ownership moves, no copy is made
        const data = res.dataSync()
        postMessage({res: data.buffer, shape: res.shape}, [data.buffer])
        console.log(`Sending the data took ${(performance.now() - t0).toFixed(1)} ms`)
      };
    }
    if (window != self)
      worker_function();
  </script>
  <script>
    const worker = new Worker(URL.createObjectURL(new Blob(["(" + worker_function.toString() + ")()"], { type: 'text/javascript' })));
    worker.postMessage({});
    worker.onmessage = (message) => {
      console.log('from main thread')
      const {data} = message
      // the received ArrayBuffer is wrapped back into a typed array
      tf.tensor(new Float32Array(data.res), data.shape)
    }
  </script>
</head>
<head>
  <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@0.14.2/dist/tf.min.js"></script>
  <script>
    const worker_function = () => {
      onmessage = () => {
        console.log('from web worker')
        this.window = this
        importScripts('https://cdn.jsdelivr.net/npm/setimmediate@1.0.5/setImmediate.min.js')
        importScripts('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@0.10.3')
        tf.setBackend('cpu')
        const res = tf.randomNormal([2000, 2000, 3])
        const t0 = performance.now()
        // no transfer list: the typed array is copied by the structured clone algorithm
        postMessage({res: res.dataSync(), shape: res.shape})
        console.log(`Sending the data took ${(performance.now() - t0).toFixed(1)} ms`)
      };
    }
    if (window != self)
      worker_function();
  </script>
  <script>
    const worker = new Worker(URL.createObjectURL(new Blob(["(" + worker_function.toString() + ")()"], { type: 'text/javascript' })));
    worker.postMessage({});
    worker.onmessage = (message) => {
      console.log('from main thread')
      const {data} = message
      tf.tensor(data.res, data.shape)
    }
  </script>
</head>
On a tensor of this size there is roughly a 10 ms difference between the two snippets. When performance matters, it is worth thinking about how the data is handled, that is, whether it should be cloned or transferred.
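If this pattern comes up often, the worker-side logic can be wrapped in a small helper. The sketch below is only illustrative (the name sendTensorData and its transfer flag are mine); it reads the tensor's data once with dataSync() and then either clones the typed array or transfers its underlying buffer:

  // hypothetical worker-side helper: read the data once, then clone or transfer it
  const sendTensorData = (tensor, transfer = true) => {
    const values = tensor.dataSync()
    if (transfer) {
      // ownership of the underlying ArrayBuffer moves to the main thread
      postMessage({res: values.buffer, shape: tensor.shape}, [values.buffer])
    } else {
      // the typed array is copied by the structured clone algorithm
      postMessage({res: values, shape: tensor.shape})
    }
  }

  // e.g. inside onmessage: sendTensorData(tf.randomNormal([2000, 2000, 3]), true)
  // on the main thread, rebuild with tf.tensor(new Float32Array(data.res), data.shape)
  // when the buffer was transferred, or tf.tensor(data.res, data.shape) when it was cloned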