# coding: utf-8

# # Least Squares using the SVD

# In[2]:

import numpy as np
import numpy.linalg as la
import scipy.linalg as spla


# In[19]:

# tall and skinny w/nullspace
np.random.seed(12)
A = np.random.randn(6, 4)
b = np.random.randn(6)
A[3] = A[4] + A[5]
A[1] = A[5] + A[1]
A[2] = A[3] + A[1]
A[0] = A[3] + A[1]


# ### Part I: Singular least squares using QR
#
# Let's see how successfully we can solve the least squares problem **when the matrix has a nullspace** using QR:

# In[4]:

Q, R = la.qr(A)


# In[5]:

R.round(3)


# We can choose `x_qr[3]` as we please:

# In[6]:

x_qr = np.zeros(A.shape[1])


# In[20]:

x_qr[3] = 0


# In[21]:

QTbnew = Q.T.dot(b)[:3] - R[:3, 3] * x_qr[3]
x_qr[:3] = spla.solve_triangular(R[:3, :3], QTbnew, lower=False)


# Let's take a look at the residual norm and the norm of `x_qr`:

# In[22]:

R.dot(x_qr) - Q.T.dot(b)[:4]


# In[23]:

la.norm(A.dot(x_qr) - b, 2)


# In[24]:

la.norm(x_qr, 2)


# Choose a different `x_qr[3]` and compare the residual and the norm of `x_qr`.

# --------------

# ### Part II: Solving least squares using the SVD

# Now compute the SVD of $A$:

# In[25]:

U, sigma, VT = la.svd(A)


# Make a matrix `Sigma` of the correct size:

# In[26]:

Sigma = np.zeros(A.shape)
Sigma[:4, :4] = np.diag(sigma)


# And check that we've actually factorized `A`:

# In[27]:

(U.dot(Sigma).dot(VT) - A).round(4)


# Now define `Sigma_pinv` as the "pseudo-"inverse of `Sigma`, where "pseudo" means "don't divide by zero":

# In[28]:

Sigma_pinv = np.zeros(A.shape).T
Sigma_pinv[:3, :3] = np.diag(1/sigma[:3])
Sigma_pinv.round(3)


# Now compute the SVD-based solution for the least-squares problem:

# In[29]:

x_svd = VT.T.dot(Sigma_pinv).dot(U.T).dot(b)


# In[30]:

la.norm(A.dot(x_svd) - b, 2)


# In[31]:

la.norm(x_svd)


# * What do you observe about $\|x_\text{svd}\|_2$ compared to $\|x_\text{qr}\|_2$?
# * Is $\|A x_\text{svd} - b\|_2$ larger or smaller than $\|A x_\text{qr} - b\|_2$?
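
# To see why the SVD solution has the smallest norm among all least-squares solutions, note that the last right singular vector `VT[3]` spans the (numerical) nullspace of $A$: adding any multiple of it to `x_svd` leaves the residual unchanged but increases the solution norm. A minimal sketch to confirm this (the scaling factor `2` is an arbitrary choice for illustration):

# In[ ]:

nullvec = VT[3]                       # right singular vector for the near-zero singular value
print(la.norm(A.dot(nullvec), 2))     # ~0: nullvec is (numerically) in the nullspace of A
x_shifted = x_svd + 2 * nullvec
print(la.norm(A.dot(x_shifted) - b, 2))   # same residual norm as x_svd
print(la.norm(x_shifted, 2))              # larger norm than x_svd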
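
# As a cross-check, `numpy.linalg.lstsq` also solves the least-squares problem via the SVD and returns the minimum-norm solution for rank-deficient systems, so it should reproduce `x_svd` (a sketch under that assumption, worth verifying numerically):

# In[ ]:

x_lstsq, residuals, rank, svals = la.lstsq(A, b, rcond=None)
print(rank)                            # reported numerical rank of A: 3
print(la.norm(x_lstsq - x_svd, 2))     # ~0: matches the hand-built SVD solution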