~\AppData\Local\Continuum\anaconda3\lib\site-packages\torch\tensor.py in backward(self, gradient, retain_graph, create_graph)
    193             products. Defaults to ``False``.
    194         """
--> 195         torch.autograd.backward(self, gradient, retain_graph, create_graph)
    196
    197     def register_hook(self, hook):

~\AppData\Local\Continuum\anaconda3\lib\site-packages\torch\autograd\__init__.py in backward(tensors, grad_tensors, retain_graph, create_graph, grad_variables)
     91         grad_tensors = list(grad_tensors)
     92
---> 93     grad_tensors = _make_grads(tensors, grad_tensors)
     94     if retain_graph is None:
     95         retain_graph = create_graph

~\AppData\Local\Continuum\anaconda3\lib\site-packages\torch\autograd\__init__.py in _make_grads(outputs, grads)
     32         if out.requires_grad:
     33             if out.numel() != 1:
---> 34                 raise RuntimeError("grad can be implicitly created only for scalar outputs")
     35             new_grads.append(torch.ones_like(out, memory_format=torch.preserve_format))
     36         else:

RuntimeError: grad can be implicitly created only for scalar outputs