For a course I'm taking, I have to implement Deep Dream for feature visualization, and I chose to do this project in the browser. For background: I'm new to machine learning and TensorFlow.js.
I have mostly followed this guide for the Python API of TensorFlow:
https://www.tensorflow.org/tutorials/generative/deepdream#calculate_loss
So far I have been able to implement all the steps up to gradient ascent. I had a feeling that calculating the gradients would be the tricky part, since the Python API has the convenient tf.GradientTape() construct, which TensorFlow.js does not have; from my understanding, I have to use tf.grad() or tf.grads() instead.
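For reference, this is how I understand tf.grad() is meant to be used on a plain tensor function (a toy example, not involving the model):

// gradient of f(x) = sum(x^2) is 2x
const f = (x) => tf.sum(tf.square(x));
const df = tf.grad(f);
df(tf.tensor1d([1, 2, 3])).print(); // prints [2, 4, 6]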
This is my loss function:
function calc_loss(model, img_tensor) {
    // [].concat() so that single and multiple model outputs are both handled as an array
    const activations = [].concat(model.predict(img_tensor));
    // mean activation of each selected layer
    const losses = activations.map(v => tf.mean(v));
    // add the per-layer means together
    const means = losses.reduce((acc, val) => tf.add(acc, val));
    return tf.sum(means); // unsure if tf.sum() is needed here
}
I pass it the feature extraction model and a tensor4d, and it returns a tensor containing a single value.
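For completeness, the feature extraction model is built roughly like this, inside my async setup code (a sketch; the model path is a placeholder, and the layer names are the ones from the tutorial — mine may differ):

const baseModel = await tf.loadLayersModel('path/to/inception_v3/model.json'); // placeholder path
const layerNames = ['mixed3', 'mixed5']; // layers chosen in the tutorial
const outputs = layerNames.map(name => baseModel.getLayer(name).output);
const model = tf.model({inputs: baseModel.inputs, outputs: outputs});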
The (partial) gradient ascent function I used:
function gradient_ascent(model, img_tensor) {
    const img_batch = img_tensor.expandDims(0); // add the batch dimension
    const loss_function = (input) => calc_loss(model, input);
    const grad_function = tf.grad(loss_function);
    return grad_function(img_batch); // the error is thrown here
}
The error it throws:
Uncaught (in promise) TypeError: x is undefined
clone http://127.0.0.1:8080/tf.2.8.2.js:17122
saved http://127.0.0.1:8080/tf.2.8.2.js:17373
saveTensorsForBackwardMode http://127.0.0.1:8080/tf.2.8.2.js:17372
kernelFunc http://127.0.0.1:8080/tf.2.8.2.js:17277
runKernelFunc http://127.0.0.1:8080/tf.2.8.2.js:17324
scopedRun http://127.0.0.1:8080/tf.2.8.2.js:17094
runKernelFunc http://127.0.0.1:8080/tf.2.8.2.js:17318
runKernel http://127.0.0.1:8080/tf.2.8.2.js:17171
batchNorm_ http://127.0.0.1:8080/tf.2.8.2.js:26574
f2 http://127.0.0.1:8080/tf.2.8.2.js:18338
batchNorm4d_ http://127.0.0.1:8080/tf.2.8.2.js:26746
f2 http://127.0.0.1:8080/tf.2.8.2.js:18338
batchNormalization http://127.0.0.1:8080/tf.2.8.2.js:72769
normalizeInference http://127.0.0.1:8080/tf.2.8.2.js:72966
call http://127.0.0.1:8080/tf.2.8.2.js:72971
tidy http://127.0.0.1:8080/tf.2.8.2.js:17080
scopedRun http://127.0.0.1:8080/tf.2.8.2.js:17094
tidy http://127.0.0.1:8080/tf.2.8.2.js:17075
tidy http://127.0.0.1:8080/tf.2.8.2.js:24132
call http://127.0.0.1:8080/tf.2.8.2.js:72942
apply http://127.0.0.1:8080/tf.2.8.2.js:56063
nameScope http://127.0.0.1:8080/tf.2.8.2.js:53015
apply http://127.0.0.1:8080/tf.2.8.2.js:56019
execute http://127.0.0.1:8080/tf.2.8.2.js:59585
batchOuts http://127.0.0.1:8080/tf.2.8.2.js:63644
tidy http://127.0.0.1:8080/tf.2.8.2.js:17080
scopedRun http://127.0.0.1:8080/tf.2.8.2.js:17094
tidy http://127.0.0.1:8080/tf.2.8.2.js:17075
tidy http://127.0.0.1:8080/tf.2.8.2.js:24132
_loop2 http://127.0.0.1:8080/tf.2.8.2.js:63620
predictLoop http://127.0.0.1:8080/tf.2.8.2.js:63652
tidy http://127.0.0.1:8080/tf.2.8.2.js:17080
scopedRun http://127.0.0.1:8080/tf.2.8.2.js:17094
tidy http://127.0.0.1:8080/tf.2.8.2.js:17075
tidy http://127.0.0.1:8080/tf.2.8.2.js:24132
predictLoop http://127.0.0.1:8080/tf.2.8.2.js:63601
predict http://127.0.0.1:8080/tf.2.8.2.js:63704
calc_loss http://127.0.0.1:8080/utils.js:103
loss_function http://127.0.0.1:8080/utils.js:124
gradients http://127.0.0.1:8080/tf.2.8.2.js:29870
tidy http://127.0.0.1:8080/tf.2.8.2.js:17080
scopedRun http://127.0.0.1:8080/tf.2.8.2.js:17094
tidy http://127.0.0.1:8080/tf.2.8.2.js:17075
y http://127.0.0.1:8080/tf.2.8.2.js:17798
scopedRun http://127.0.0.1:8080/tf.2.8.2.js:17094
gradients http://127.0.0.1:8080/tf.2.8.2.js:17793
grad http://127.0.0.1:8080/tf.2.8.2.js:29869
tidy http://127.0.0.1:8080/tf.2.8.2.js:17080
scopedRun http://127.0.0.1:8080/tf.2.8.2.js:17094
tidy http://127.0.0.1:8080/tf.2.8.2.js:17075
grad http://127.0.0.1:8080/tf.2.8.2.js:29868
gradient_ascent http://127.0.0.1:8080/utils.js:127
handleTest http://127.0.0.1:8080/script.js:74
promise callback*handleTest/< http://127.0.0.1:8080/script.js:69
promise callback*handleTest http://127.0.0.1:8080/script.js:68
EventListener.handleEvent* http://127.0.0.1:8080/script.js:126
What I've tried:
- I replaced the loss function and the gradient calculation with inputGradientAscent() from this repo (tfjs-examples), but got a similar error: instead of "x is undefined", it says "_this2.gamma is undefined".
- I didn't gain any insight by using the debugger.
- I've tried tf.js versions 2.0, 2.4, 2.7, and now 2.8.2, all with the same result.