Skip to content

Commit

Permalink
add tests for losses (CCE currently failing)
Browse files Browse the repository at this point in the history
  • Loading branch information
bencbartlett committed Jan 9, 2019
1 parent 117b0bf commit a2f2a2c
Show file tree
Hide file tree
Showing 3 changed files with 51 additions and 1 deletion.
3 changes: 2 additions & 1 deletion neuroptica/losses.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 43,7 @@ class CategoricalCrossEntropy(Loss):
@staticmethod
def L(X: np.ndarray, T: np.ndarray) -> np.ndarray:
X_softmax = np.exp(X) / np.sum(np.exp(X), axis=0)
X_clip = np.clip(X_softmax, 1e-7, 1 - 1e-7)
X_clip = np.clip(X_softmax, 1e-9, 1 - 1e-9)
return -np.sum(T * np.log(X_clip), axis=0)

@staticmethod
Expand All @@ -54,3 54,4 @@ def dL(X: np.ndarray, T: np.ndarray) -> np.ndarray:
return -T / X_clip
else:
return np.conj(X - T)
# return X - T
47 changes: 47 additions & 0 deletions tests/test_losses.py
Original file line number Diff line number Diff line change
@@ -0,0 1,47 @@
import unittest

from neuroptica.layers import Activation, ClementsLayer
from neuroptica.losses import CategoricalCrossEntropy, MeanSquaredError
from neuroptica.models import Sequential
from neuroptica.nonlinearities import *
from neuroptica.optimizers import Optimizer
from tests.base import NeuropticaTest
from tests.test_models import TestModels


class TestLosses(NeuropticaTest):
    '''Tests for model losses'''

    def test_loss_gradients(self):
        # Verify analytic loss gradients against numerical gradients for
        # both an odd and an even mesh size.
        for mesh_size in [9, 10]:
            for loss_class in (MeanSquaredError, CategoricalCrossEntropy):
                print(f"Testing loss {loss_class}")

                batch_size = 6
                total_samples = 4 * batch_size

                # Random inputs drawn uniformly from [-1, 1); the training
                # targets are simply their magnitudes.
                inputs = (2 * np.random.rand(mesh_size * total_samples) - 1) \
                    .reshape((mesh_size, total_samples))
                targets = np.abs(inputs)

                # Single Clements mesh followed by an |z| activation
                model = Sequential([ClementsLayer(mesh_size),
                                    Activation(Abs(mesh_size))])

                for X, Y in Optimizer.make_batches(inputs, targets, batch_size):
                    # Forward-propagate the batch, then backpropagate the
                    # loss derivative to obtain the model gradients.
                    Y_hat = model.forward_pass(X)
                    gradients = model.backward_pass(loss_class.dL(Y_hat, Y))

                    TestModels.verify_model_gradients(
                        model, X, Y, loss_class.L, gradients, epsilon=1e-6)


# Allow running this test module directly (e.g. `python tests/test_losses.py`).
if __name__ == "__main__":
    unittest.main()
2 changes: 2 additions & 0 deletions tests/test_nonlinearities.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 41,8 @@ def test_OpticalMesh_adjoint_optimize(self):

# nonlinearities that may be applied to complex outputs
nonlinearities_complex = [Abs(N, mode="full"),
Abs(N, mode="condensed"),
Abs(N, mode="polar"),
SoftMax(N),
AbsSquared(N),
ElectroOpticActivation(N, **eo_settings),
Expand Down

0 comments on commit a2f2a2c

Please sign in to comment.