feat: PSR working but x is padded
MatteoRobbiati committed Oct 28, 2024
1 parent c1534e1 commit 687e843
Showing 3 changed files with 2 additions and 102 deletions.
100 changes: 0 additions & 100 deletions exercise.py

This file was deleted.

2 changes: 1 addition & 1 deletion src/qiboml/models/pytorch.py
@@ -24,7 +24,7 @@ def __post_init__(
         super().__init__()

         params = [p for param in self.circuit.get_parameters() for p in param]
-        params = torch.as_tensor(self.backend.to_numpy(params)).ravel()
+        params = torch.as_tensor(self.backend.to_numpy(x=params)).ravel()
         params.requires_grad = True
         self.circuit_parameters = torch.nn.Parameter(params)
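
The change above simply passes the parameters by keyword, matching the backend's to_numpy(x=...) signature as used in the diff. As a minimal sketch of what this flattening step does, with a hypothetical DummyCircuit standing in for a qibo circuit (the dtype and the skipped backend round-trip are assumptions, not qiboml's code):

import torch

class DummyCircuit:
    """Hypothetical stand-in for a qibo circuit with parametrized gates."""

    def get_parameters(self):
        # One tuple per parametrized gate, as Circuit.get_parameters returns.
        return [(0.1,), (0.2, 0.3)]

circuit = DummyCircuit()

# Flatten the per-gate tuples into a single list of floats.
params = [p for param in circuit.get_parameters() for p in param]

# Build a 1D tensor and register it as a trainable torch parameter,
# mirroring the __post_init__ body in the hunk above.
params = torch.as_tensor(params, dtype=torch.float64).ravel()
params.requires_grad = True
circuit_parameters = torch.nn.Parameter(params)

print(circuit_parameters)  # tensor([0.1000, 0.2000, 0.3000], ..., requires_grad=True)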
2 changes: 1 addition & 1 deletion src/qiboml/operations/differentiation.py
@@ -25,7 +25,7 @@ def evaluate(self, x: ndarray, encoding, training, decoding, backend, *parameters):
             "Parameter Shift Rule only supports expectation value decoding.",
         )
         x = encoding(x) + training
-        gradients = []
+        gradients = [np.array([[0.0]])]
         for i in range(len(parameters)):
             gradients.append(
                 self.one_parameter_shift(
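
The prepended np.array([[0.0]]) appears to act as a placeholder gradient for the input x (the "x is padded" of the commit message), so the returned list has one entry per layer input even though the shift rule is applied only to the trainable circuit parameters. As a minimal, self-contained sketch of the parameter shift rule itself, assuming a single RY rotation and the standard pi/2 shift (the toy expectation function and this one_parameter_shift share only their names with qiboml's API):

import numpy as np

def expectation(theta):
    """Toy expectation value <Z> after RY(theta[0]) acting on |0>."""
    return float(np.cos(theta[0]))

def one_parameter_shift(theta, i, shift=np.pi / 2):
    """d<E>/d theta_i via the parameter shift rule (exact for rotation gates)."""
    plus, minus = theta.copy(), theta.copy()
    plus[i] += shift
    minus[i] -= shift
    return (expectation(plus) - expectation(minus)) / (2 * np.sin(shift))

theta = np.array([0.7])
# Pad the gradient list with a zero placeholder for x, as in the diff above.
gradients = [np.array([[0.0]])]
for i in range(len(theta)):
    gradients.append(one_parameter_shift(theta, i))

print(gradients[1], -np.sin(0.7))  # both print ~ -0.6442, so the rule is exact here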
