Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

added ReLU #1

Merged
merged 4 commits into from
Dec 17, 2018
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
added other complex ReLUs
  • Loading branch information
twhughes committed Dec 6, 2018
commit e3ee66cc0a7db82f9bc925342fea79667b0b12a5
62 changes: 52 additions & 10 deletions neuroptica/nonlinearities.py
Original file line number Diff line number Diff line change
Expand Up @@ -297,45 +297,87 @@ def df_dZ(self, Z: np.ndarray):
class ReLU(ComplexNonlinearity):
    '''
    Discontinuous (but holomorphic and backpropable) ReLU

        f(x_i) = alpha * x_i   if |x_i| <  cutoff
        f(x_i) = x_i           if |x_i| >= cutoff

    Arguments:
    ----------
        cutoff: value of input |x_i| at or above which to fully transmit, below which to attenuate
        alpha: attenuation factor applied to inputs with |x_i| below the cutoff
    '''

    def __init__(self, N, cutoff=1, alpha=0):
        # Set attributes before calling super() in case the base-class
        # initialization touches them.
        self.cutoff = cutoff
        self.alpha = alpha
        super().__init__(N, holomorphic=True)

    def forward_pass(self, X: np.ndarray):
        # Full transmission at or above the cutoff, attenuation by alpha below it.
        return (np.abs(X) >= self.cutoff) * X + (np.abs(X) < self.cutoff) * self.alpha * X

    def df_dZ(self, Z: np.ndarray):
        # Piecewise-constant derivative matching forward_pass:
        # 1 at or above the cutoff, alpha below it.
        return (np.abs(Z) >= self.cutoff) * 1 + (np.abs(Z) < self.cutoff) * self.alpha * 1


class modReLU(ComplexNonlinearity):
    '''
    Continuous, but non-holomorphic and non-simply-backpropable ReLU of the form

        f(z) = (|z| - cutoff) * z / |z|   if |z| >= cutoff, else 0

    see: https://arxiv.org/pdf/1705.09792.pdf (note, cutoff subtracted in this definition)

    Arguments:
    ----------
        cutoff: value of input |z| at or above which the input is transmitted
                (with its magnitude reduced by cutoff)
    '''

    def __init__(self, N, cutoff=1):
        self.cutoff = cutoff
        super().__init__(N, holomorphic=False, mode="polar")

    def forward_pass(self, X: np.ndarray):
        absX = np.abs(X)
        # Guard the division: at X == 0 the naive X / |X| is 0/0 = NaN, and
        # False * NaN is still NaN, so the mask alone does not protect us.
        # Substituting 1 in the denominator where |X| == 0 is safe because the
        # numerator is 0 there.
        safe_abs = np.where(absX > 0, absX, 1)
        return np.where(absX >= self.cutoff, (absX - self.cutoff) * X / safe_abs, 0)

    def df_dr(self, r: np.ndarray, phi: np.ndarray):
        # d/dr [(r - cutoff) * e^{i phi}] = e^{i phi} on the transmitting region.
        return (r >= self.cutoff) * np.exp(1j * phi)

    def df_dphi(self, r: np.ndarray, phi: np.ndarray):
        # d/dphi [(r - cutoff) * e^{i phi}] = 1j * (r - cutoff) * e^{i phi}.
        return (r >= self.cutoff) * 1j * (r - self.cutoff) * np.exp(1j * phi)


class cReLU(ComplexNonlinearity):
    '''
    Continuous, but non-holomorphic and non-simply-backpropable ReLU applied
    componentwise to the real and imaginary parts:

        f(z) = ReLU(Re{z}) + 1j * ReLU(Im{z})

    see: https://arxiv.org/pdf/1705.09792.pdf
    '''

    def __init__(self, N):
        super().__init__(N, holomorphic=False, mode="condensed")

    def forward_pass(self, X: np.ndarray):
        # Rectify each component independently.
        re_part = np.real(X)
        im_part = np.imag(X)
        return np.where(re_part > 0, re_part, 0) + 1j * np.where(im_part > 0, im_part, 0)

    def df_dRe(self, a: np.ndarray, b: np.ndarray) -> np.ndarray:
        # Unit slope where the real part is positive, zero elsewhere.
        return a > 0

    def df_dIm(self, a: np.ndarray, b: np.ndarray) -> np.ndarray:
        # Slope 1j where the imaginary part is positive, zero elsewhere.
        return (b > 0) * 1j


class zReLU(ComplexNonlinearity):
    '''
    Continuous, but non-holomorphic and non-simply-backpropable ReLU that
    transmits only inputs in the open first quadrant of the complex plane:

        f(z) = z if Re{z} > 0 and Im{z} > 0, else 0

    see: https://arxiv.org/pdf/1705.09792.pdf
    '''

    def __init__(self, N):
        super().__init__(N, holomorphic=False, mode="condensed")

    def forward_pass(self, X: np.ndarray):
        # Mask selecting entries whose real AND imaginary parts are both positive.
        in_first_quadrant = (np.real(X) > 0) & (np.imag(X) > 0)
        return in_first_quadrant * X

    def df_dRe(self, a: np.ndarray, b: np.ndarray) -> np.ndarray:
        # Unit slope inside the first quadrant, zero elsewhere.
        return (a > 0) & (b > 0)

    def df_dIm(self, a: np.ndarray, b: np.ndarray) -> np.ndarray:
        # Slope 1j inside the first quadrant, zero elsewhere.
        return ((a > 0) & (b > 0)) * 1j
4 changes: 3 additions & 1 deletion tests/test_nonlinearities.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,9 @@ def test_OpticalMesh_adjoint_optimize(self):
SPMActivation(N, 1),
LinearMask(N, mask=np.random.rand(N)),
ReLU(N, cutoff=0.5, alpha=0.1),
modReLU(N, cutoff=0.5)]
modReLU(N, cutoff=0.5),
cReLU(N),
zReLU(N)]
for nonlinearity in nonlinearities:

print("Testing nonlinearity {}".format(nonlinearity))
Expand Down