#!/usr/bin/env python
# coding: utf-8

# In[1]:


# imports
import numpy as np
from numpy import linalg, random


# In this tutorial, we will make use of numpy's ```linalg``` module.

# In[2]:


# first, initialize a new numpy array
a = np.array([[1], [2], [3]])
a


# ---
# We can compute various norms using the ```linalg.norm``` function:

# In[3]:


linalg.norm(a)  # L2 / Euclidean norm


# In[4]:


linalg.norm(a, ord=1)  # L1 norm


# In[5]:


linalg.norm(a, ord=np.inf)  # inf-norm / max norm


# ---
# Computing the determinant:

# In[6]:


# initialize matrix A
A = np.array([[1, 0, 3], [4, -1, 6], [7, 8, 3]])
A


# In[7]:


# compute the determinant
linalg.det(A)


# In[8]:


# initialize matrix B
B = np.array([[1, 2, 3], [2, 4, 6], [3, 6, 9]])
B


# In[9]:


# compute the determinant
linalg.det(B)


# In[10]:


# compute the inverse
try:
    linalg.inv(B)  # raises an exception because the matrix is singular
except Exception as e:
    print(e)


# ---
# Computing eigenvalues and eigenvectors:

# In[11]:


eigvals, eigvecs = linalg.eig(A)


# In[12]:


print(eigvals)  # vector of eigenvalues


# In[13]:


print(eigvecs)  # eigenvectors are the columns of the matrix


# Note that ```eig``` does not guarantee that the eigenvalues are returned in sorted order.
#
# For symmetric matrices, use ```eigh``` instead. It is more efficient and numerically more reliable because it takes advantage of the fact that the matrix is symmetric. Moreover, the eigenvalues are returned in ascending order.

# In[14]:


# initialize a symmetric matrix
S = np.matmul(A, A.transpose())
S


# In[15]:


eigvals, eigvecs = linalg.eigh(S)


# In[16]:


print(eigvals)


# In[17]:


print(eigvecs)


# If you only want the eigenvalues, you can use the ```linalg.eigvals``` and ```linalg.eigvalsh``` functions.

# ---
# Let's try to recompose the matrix from its eigenvalues and eigenvectors:

# In[18]:


A


# In[19]:


L, V = linalg.eig(A)  # eigenvalues and eigenvectors


# In[20]:


A_recomposed = np.matmul(V, np.matmul(np.diag(L), linalg.inv(V)))  # recompose the matrix
A_recomposed


# In[21]:


linalg.norm(A - A_recomposed)


# ---
# Let's do the same thing with the SVD:

# In[22]:


A = random.rand(15).reshape(3, 5)
A


# In[23]:


U, S, V_T = linalg.svd(A)  # notice that S is a one-dimensional vector
print(f"U =\n{U}\n{15*'-'}\nS =\n{S}\n{15*'-'}\nV_T =\n{V_T}")


# In[24]:


# pad S with zeros and slice rows to build the 3x5 matrix diag(S)
A_recomposed = np.matmul(U, np.matmul(np.diag([*S, 0, 0])[0:3], V_T))
print(f"U * S * V_T =\n{A_recomposed}")


# In[25]:


print(f"Norm of the diff is: {linalg.norm(A - A_recomposed)}")


# ---
# Let's see what happens when the number of rows is greater than the number of columns:

# In[26]:


A = A.T
print(A)


# In[27]:


U, S, V_T = linalg.svd(A)  # notice that S is a one-dimensional vector
print(f"U =\n{U}\n{15*'-'}\nS =\n{S}\n{15*'-'}\nV_T =\n{V_T}")


# Notice the relation with the previous example: $U$ and $V$ have swapped places.

# In[28]:


# this time diag(S) has to be 5x3, so we keep only the first three columns
A_recomposed = np.matmul(U, np.matmul(np.diag([*S, 0, 0])[:, 0:3], V_T))
print(f"U * S * V_T =\n{A_recomposed}")


# In[29]:


print(f"Norm of the diff is: {linalg.norm(A - A_recomposed)}")


# ---
# Finally, if a symmetric matrix has distinct eigenvalues, then its SVD coincides with its eigendecomposition (up to sign changes):

# In[30]:


A = random.rand(25).reshape(5, 5)
A = np.matmul(A, A.T)
A


# In[31]:


U, S, V_T = linalg.svd(A)
eigvals, eigvecs = linalg.eigh(A)
print(list(S), list(reversed(eigvals)), sep='\n')


# Here, the numbers are not exactly equal because floating point arithmetic is not exact. (Since $A$ was constructed as a matrix times its own transpose, it is positive semidefinite, so its eigenvalues are nonnegative and coincide with the singular values.)

# In[32]:


print(eigvecs[:, -1::-1])


# In[33]:


print(f"U =\n{U}\n{15*'-'}\nV_T =\n{V_T}")


# $U$ and the eigenvectors are the same (up to sign). Also, $U$ and $V$ are equal.
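# As a quick sanity check (a minimal sketch reusing the `U`, `V_T`, and `eigvecs` computed in the cells above), we can compare the factors up to a per-column sign:

# In[ ]:


# eigh returns eigenvalues in ascending order, so reverse the eigenvector columns
# to match the descending order of the singular values, then compare absolute values
print(np.allclose(np.abs(U), np.abs(eigvecs[:, ::-1])))  # U matches the eigenvectors up to sign
print(np.allclose(np.abs(U), np.abs(V_T.T)))             # U matches V (columns of V are rows of V_T)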
# ---
# When $A$ is symmetric we have $A = A^T$. By the SVD, $A = U \times diag(S) \times V^T$, so $A^T = V \times diag(S) \times U^T$. For reasons outside the scope of this class, the SVD is unique (up to sign changes), so:
#
# $A = A^T \implies V = U$
#
# In this example the numbers are not exactly the same, but $V \approx E$, where $E$ is the matrix of eigenvectors; again, this is because floating point arithmetic is not exact.
#
# By the definition of the SVD, $V$ and $U$ are orthogonal, and we just saw that $U = V$:
#
# $V^T \times V = I \implies V^T = V^{-1} \implies V^T = U^{-1}$
#
# Since $A = U \times diag(S) \times V^T$, we have:
#
# $A = U \times diag(S) \times U^{-1}$
#
# If the eigenvalues are distinct, this is the only eigendecomposition (up to the sign and order of the eigenvectors).
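# As a final sanity check (a minimal sketch reusing `A`, `U`, and `S` from the cells above), we can reconstruct $A$ as $U \times diag(S) \times U^{-1}$:

# In[ ]:


# since U is orthogonal, linalg.inv(U) equals U.T up to round-off
A_from_eig = np.matmul(U, np.matmul(np.diag(S), linalg.inv(U)))
print(f"Norm of the diff is: {linalg.norm(A - A_from_eig)}")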