Performs back-propagation on a Torch Tensor, with an assumed external gradient of ones.
| Type | Intent | Optional | Attributes | Name | Description |
|---|---|---|---|---|---|
| type(torch_tensor) | intent(in) | | | tensor | Tensor to compute gradients of |
| logical | intent(in) | optional | | retain_graph | Should the computational graph be retained? |
!> Performs back-propagation on a Torch Tensor, with an assumed external
!! gradient of ones. Only valid for scalar (single-element, rank-1) tensors,
!! mirroring PyTorch's rule that `.backward()` without an explicit gradient
!! requires a scalar output.
subroutine torch_tensor_backward_without_external_gradient(tensor, retain_graph)
  use, intrinsic :: iso_c_binding, only : c_bool, c_int64_t
  use, intrinsic :: iso_fortran_env, only : error_unit
  type(torch_tensor), intent(in) :: tensor
  !! Tensor to compute gradients of
  logical, optional, intent(in) :: retain_graph
  !! Should the computational graph be retained?

  ! Local arguments
  logical(c_bool) :: retain_graph_value
  integer(c_int64_t) :: sizes(1)

  interface
    subroutine torch_tensor_backward_without_external_gradient_c(tensor_c, retain_graph_c) &
        bind(c, name = 'torch_tensor_backward_without_external_gradient')
      use, intrinsic :: iso_c_binding, only : c_bool, c_ptr
      implicit none
      type(c_ptr), value, intent(in) :: tensor_c
      logical(c_bool), value, intent(in) :: retain_graph_c
    end subroutine torch_tensor_backward_without_external_gradient_c
  end interface

  ! An implicit external gradient of ones can only be created for a scalar
  ! field, i.e. a single-element rank-1 tensor, so validate before calling C.
  ! NOTE: errors go to stderr and terminate via `error stop` (F2008) so the
  ! failure is visible even when stdout is redirected.
  if (tensor%get_rank() /= 1) then
    write(error_unit, *) "Error :: external gradient can only be implicitly created for scalar fields"
    error stop 1
  end if
  sizes(:) = tensor%get_shape()
  if (sizes(1) /= 1) then
    write(error_unit, *) "Error :: external gradient can only be implicitly created for scalar fields"
    error stop 1
  end if

  ! Do not retain the graph by default
  if (present(retain_graph)) then
    retain_graph_value = retain_graph
  else
    retain_graph_value = .false.
  end if

  ! Call back-propagation via the bound C routine
  call torch_tensor_backward_without_external_gradient_c(tensor%p, retain_graph_value)
end subroutine torch_tensor_backward_without_external_gradient