Interpolation with Generalized Vandermonde Matrices

In [1]:
import numpy as np
import numpy.linalg as la
import matplotlib.pyplot as pt
In [54]:
# Pick a test function: exp(1.5 x) (True branch) or sin(4 x) (False branch).
if True:
    def f(x):
        return np.exp(1.5*x)
    def df(x):
        return 1.5*np.exp(1.5*x)
else:
    def f(x):
        return np.sin(4*x)
    def df(x):
        return 4*np.cos(4*x)
In [55]:
x = np.linspace(0, 1, 1000)
pt.plot(x, f(x))
[plot: f(x) on [0, 1]]

Fix some parameters:

In [80]:
degree = 2
h = 1

nodes = np.linspace(0, h, degree+1)
nodes
Out[80]:
array([ 0. ,  0.5,  1. ])

Build the Vandermonde matrix V, with entries V[i, j] = nodes[i]**j:

In [81]:
V = np.array([
    nodes**i
    for i in range(degree+1)
]).T
In [82]:
V
Out[82]:
array([[ 1.  ,  0.  ,  0.  ],
       [ 1.  ,  0.5 ,  0.25],
       [ 1.  ,  1.  ,  1.  ]])
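
As a cross-check, NumPy can build the same monomial Vandermonde matrix directly, and the same pattern extends to other bases. (A sketch; the Legendre variant is only an illustration of the "generalized" construction and is not used below.)

# Same matrix via NumPy's built-in constructor (increasing powers left to right).
V_np = np.vander(nodes, degree+1, increasing=True)
assert np.allclose(V, V_np)

# A generalized Vandermonde matrix uses any basis phi_j: V[i, j] = phi_j(nodes[i]).
# For example, with Legendre polynomials as the basis functions:
V_leg = np.polynomial.legendre.legvander(nodes, degree)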

Now find the interpolation coefficients coeffs by solving the Vandermonde system V @ coeffs = f(nodes):

In [83]:
coeffs = la.solve(V, f(nodes))
In [84]:
interp = 0*x
for i in range(degree+1):
    interp += coeffs[i] * x**i
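
As a quick sanity check (a sketch using the arrays defined above), the interpolant must reproduce f exactly at the interpolation nodes:

# Evaluate the interpolant at the nodes and compare with the data it was built from.
interp_at_nodes = sum(coeffs[i] * nodes**i for i in range(degree+1))
assert np.allclose(interp_at_nodes, f(nodes))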
In [85]:
pt.plot(x, f(x), "--", color="gray", label="$f$")
pt.plot(x, interp, color="red", label="Interpolant")
pt.plot(nodes, f(nodes), "o")
pt.legend(loc="best")
[plot: f (dashed gray), the interpolant (red), and the interpolation nodes]
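
To quantify how close the interpolant is away from the nodes (a sketch on the existing plotting grid x):

# Maximum pointwise interpolation error on the fine grid.
print("max |f - interp| on [0, 1]:", np.max(np.abs(f(x) - interp)))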

Now evaluate the derivative of the interpolant as interp_deriv:

In [86]:
interp_deriv = 0*x
for i in range(1, degree+1):
    interp_deriv += coeffs[i] * i * x**(i-1)
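
Equivalently (a sketch as a cross-check), NumPy's polynomial module can differentiate the coefficient vector directly, since coeffs is already stored in increasing-degree order:

from numpy.polynomial import polynomial as P

# polyder expects coefficients ordered from degree 0 upward, as coeffs already is.
dcoeffs = P.polyder(coeffs)
assert np.allclose(P.polyval(x, dcoeffs), interp_deriv)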
In [87]:
pt.plot(x, interp_deriv, color="red", label="Interpolant derivative")
pt.plot(x, df(x), "--", color="gray", label="$f'$")
pt.legend(loc="best")
[plot: interpolant derivative (red) vs. f' (dashed gray)]
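
Differentiating the interpolant typically gives a noticeably worse approximation than the interpolant itself; a quick way to see the gap (a sketch on the same grid):

# Maximum pointwise error of the differentiated interpolant.
print("max |f' - interp_deriv| on [0, 1]:", np.max(np.abs(df(x) - interp_deriv)))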