if (torch_is_installed()) {
## Many unexported functions perform 'backward' calculations (e.g. derivatives).
## They could be used, for example, inside custom autograd functions;
## a sketch of that appears at the end of this example.
x <- torch_randn(10, requires_grad = TRUE)
y <- torch_tanh(x)
## compute the gradient with the standard torch backward pass
y$backward(torch_ones_like(x))
x$grad
## We can get the same result by calling the unexported `torch_tanh_backward()`
## function directly. Its first argument is the upstream gradient that seeds the
## Jacobian-vector product; the scalar 1 broadcasts to a tensor of ones, matching
## the torch_ones_like(x) passed to backward() above.
## See https://pytorch.org/blog/overview-of-pytorch-autograd-engine/ for details.
call_torch_function("torch_tanh_backward", 1, y)
all.equal(call_torch_function("torch_tanh_backward", 1, y, quiet = TRUE), x$grad)
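
## The upstream gradient need not be ones. For tanh the analytic derivative is
## 1 - tanh(x)^2, so seeding the Jacobian-vector product with an arbitrary
## vector `v` (an illustrative name) should give v * (1 - y^2):
v <- torch_randn(10)
all.equal(
  call_torch_function("torch_tanh_backward", v, y, quiet = TRUE),
  v * (1 - y$detach()^2)
)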
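
## A minimal sketch of plugging a backward helper into a custom autograd
## function via autograd_function() (see ?autograd_function); the names
## `tanh2` and `x2` are illustrative:
tanh2 <- autograd_function(
  forward = function(ctx, input) {
    output <- torch_tanh(input)
    ## save the forward output: tanh's derivative is expressed in terms of it
    ctx$save_for_backward(output = output)
    output
  },
  backward = function(ctx, grad_output) {
    ## delegate the derivative to the unexported backward helper
    list(input = call_torch_function(
      "torch_tanh_backward", grad_output, ctx$saved_variables$output,
      quiet = TRUE
    ))
  }
)
x2 <- torch_randn(10, requires_grad = TRUE)
tanh2(x2)$backward(torch_ones_like(x2))
x2$grad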
}