diff --git a/autograd/numpy/__init__.py b/autograd/numpy/__init__.py
index e1e22fb2c..ff49fb35e 100644
--- a/autograd/numpy/__init__.py
+++ b/autograd/numpy/__init__.py
@@ -1,6 +1,7 @@
 from __future__ import absolute_import
 from . import numpy_wrapper
 from . import numpy_grads
+from . import numpy_grads_interp
 from . import numpy_extra
 from .numpy_wrapper import *
 from . import linalg
diff --git a/autograd/numpy/numpy_grads_interp.py b/autograd/numpy/numpy_grads_interp.py
new file mode 100644
index 000000000..144da3e5f
--- /dev/null
+++ b/autograd/numpy/numpy_grads_interp.py
@@ -0,0 +1,68 @@
+from . import numpy_wrapper as anp
+
+def _interp_vjp(x, xp, yp, left, right, period, g):
+    from autograd import vector_jacobian_product
+    func = vector_jacobian_product(_interp, argnum=2)
+    return func(x, xp, yp, left, right, period, g)
+
+def _interp(x, xp, yp, left=None, right=None, period=None):
+    """ A partial rewrite of interp that is differentiable against yp """
+    if period is not None:
+        xp = anp.concatenate([[xp[-1] - period], xp, [xp[0] + period]])
+        yp = anp.concatenate([anp.array([yp[-1]]), yp, anp.array([yp[0]])])
+        return _interp(x % period, xp, yp, left, right, None)
+
+    if left is None: left = yp[0]
+    if right is None: right = yp[-1]
+
+    xp = anp.concatenate([[xp[0]], xp, [xp[-1]]])
+
+    yp = anp.concatenate([anp.array([left]), yp, anp.array([right])])
+    m = make_matrix(x, xp)
+    y = anp.inner(m, yp)
+    return y
+
+anp.interp.defvjp(lambda g, ans, vs, gvs, x, xp, yp, left=None, right=None, period=None:
+            _interp_vjp(x, xp, yp, left, right, period, g), argnum=2)
+
+
+# The following are internal functions
+
+import numpy as np
+
+def W(r, D):
+    """ Convolution kernel for linear interpolation.
+        D is the differences of xp.
+    """
+    mask = D == 0
+    D = np.where(mask, 1.0, D)  # safe divisor; must NOT mutate caller's D, mask handles zeros
+    Wleft = 1.0 + r[1:] / D
+    Wright = 1.0 - r[:-1] / D
+    # edges
+    Wleft = np.where(mask, 0, Wleft)
+    Wright = np.where(mask, 0, Wright)
+    Wleft = np.concatenate([[0], Wleft])
+    Wright = np.concatenate([Wright, [0]])
+    out = np.where(r < 0, Wleft, Wright)
+    out = np.where(r == 0, 1.0, out)
+    out = np.where(out < 0, 0, out)
+    return out
+
+def make_matrix(x, xp):
+    D = np.diff(xp)
+    w = []
+    v0 = np.zeros(len(xp))
+    v0[0] = 1.0
+    v1 = np.zeros(len(xp))
+    v1[-1] = 1.0
+    for xi in x:
+        # left, use left
+        if xi < xp[0]: v = v0
+        # right , use right
+        elif xi > xp[-1]: v = v1
+        else:
+            v = W(xi - xp, D)
+            v[0] = 0
+            v[-1] = 0
+        w.append(v)
+    return np.array(w)
diff --git a/tests/test_numpy_interp.py b/tests/test_numpy_interp.py
new file mode 100644
index 000000000..2e43db595
--- /dev/null
+++ b/tests/test_numpy_interp.py
@@ -0,0 +1,41 @@
+from __future__ import absolute_import
+import warnings
+
+import autograd.numpy as np
+import autograd.numpy.random as npr
+from autograd.util import *
+from autograd import grad
+
+from numpy.testing import assert_allclose
+
+def test_interp():
+    x = np.arange(-20, 20, 0.1)
+    xp = np.arange(10) * 1.0
+    npr.seed(1)
+    yp = xp ** 0.5 + npr.normal(size=xp.shape)
+    def fun(yp): return to_scalar(np.interp(x, xp, yp))
+    def dfun(yp): return to_scalar(grad(fun)(yp))
+
+    check_grads(fun, yp)
+    check_grads(dfun, yp)
+
+def test_interp_edge():
+    x = np.arange(-20, 20, 0.1)
+    xp = np.arange(10) * 1.0
+    npr.seed(1)
+    yp = xp ** 0.5 + npr.normal(size=xp.shape)
+    def fun(yp): return to_scalar(np.interp(x, xp, yp, left=-1, right=-1))
+    def dfun(yp): return to_scalar(grad(fun)(yp))
+    check_grads(fun, yp)
+    check_grads(dfun, yp)
+
+def test_interp_period():
+    x = np.arange(-20, 20, 0.5)
+    xp = np.arange(10) * 1.0
+    npr.seed(1)
+    yp = xp ** 0.5 + npr.normal(size=xp.shape)
+    def fun(yp): return to_scalar(np.interp(x, xp, yp, period=10))
+    def dfun(yp): return to_scalar(grad(fun)(yp))
+
+    check_grads(fun, yp)
+    check_grads(dfun, yp)