import numpy as np
import matplotlib.pyplot as plt

# ---------------------------------------------------------------------------
# Ridge regression trade-off study.
#
# For a random least-squares problem min ||Ax - b||^2, sweep the
# regularization weight gamma in
#     min ||Ax - b||^2 + gamma * ||x||^2
# and record both objective terms.  Each regularized problem is solved as a
# stacked ordinary least-squares problem:
#     || [A; sqrt(gamma) I] x - [b; 0] ||^2
# ---------------------------------------------------------------------------

# Problem data.
m = 15                               # number of measurements
n = 10                               # number of unknowns
np.random.seed(2010)                 # reproducible experiment
A = np.random.randn(m, n)
b = np.random.randn(m)

gamma_vals = np.logspace(-3, 3)      # 50 log-spaced weights in [1e-3, 1e3]
sq_penalty = []                      # ||A xhat - b||^2 for each gamma
l2_penalty = []                      # ||xhat||^2 for each gamma
xhat_values = []                     # the minimizers themselves
I_n = np.eye(n)                      # hoisted loop invariants
zeros_n = np.zeros(n)
for gamma in gamma_vals:
    # Stacked formulation of the ridge problem.
    A_tilde = np.vstack((A, np.sqrt(gamma) * I_n))
    b_tilde = np.hstack((b, zeros_n))
    xhat = np.linalg.lstsq(A_tilde, b_tilde, rcond=None)[0]
    xhat_values.append(xhat)
    sq_penalty.append(np.linalg.norm(A @ xhat - b) ** 2)
    l2_penalty.append(np.linalg.norm(xhat) ** 2)

# Plot trade-off curve.
plt.figure(figsize=(12, 9), dpi=100)
plt.plot(l2_penalty, sq_penalty)
plt.xlabel(r'$||x||_2^2$')
plt.ylabel(r'$||Ax-b||_2^2$')
plt.title('Trade-Off Curve for Ridge regression')
plt.grid()
plt.show()

# Plot entries of x vs. gamma.
plt.figure(figsize=(12, 9), dpi=100)
xhat_matrix = np.array(xhat_values)  # shape (len(gamma_vals), n)
for i in range(n):
    plt.semilogx(gamma_vals, xhat_matrix[:, i])
plt.xlabel(r'$\gamma$')
plt.ylabel(r'$x_{i}$')
plt.title(r'Entries of $x$ vs. $\gamma$')
plt.grid()
plt.show()

# ---------------------------------------------------------------------------
# Polynomial data fitting.
# Load (x, y) samples and fit a degree-(d-1) polynomial by least squares.
# NOTE: duplicate mid-file "import numpy / import matplotlib" lines removed --
# both modules are already imported at the top of this file.
# ---------------------------------------------------------------------------
data = np.loadtxt('https://jonghank.github.io/ee370/files/fit_data.csv',
                  delimiter=',')
x, y = data[:, 0], data[:, 1]

plt.figure(figsize=(6, 6), dpi=100)
plt.plot(x, y, 'o', alpha=0.5)
plt.grid()
plt.axis('square')
plt.xlim(0, 1)
plt.ylim(0, 1)
plt.xlabel(r'$x$')
plt.ylabel(r'$y$')
plt.title('Raw data')
plt.show()

# Degree-5 polynomial fit (d = 6 monomial basis functions 1, x, ..., x^5).
n = len(x)
d = 6
# Vandermonde matrix with columns x^0, x^1, ..., x^(d-1); replaces the
# original element-wise fill loop "for i in range(d): X[:,i] = x**i".
X = np.vander(x, d, increasing=True)
theta_opt = np.linalg.lstsq(X, y, rcond=None)[0]
residual = y - X @ theta_opt
print(f'Optimal theta: {theta_opt}')
print(f'RMSE: {np.linalg.norm(residual)/np.sqrt(n)}')

# Evaluate the fitted polynomial on a dense grid for plotting.
vp = np.linspace(0, 1, 100)
X_vp = np.vander(vp, d, increasing=True)
plt.figure(figsize=(6, 6), dpi=100)
plt.plot(x, y, 'o', alpha=0.5, label='Raw data')
plt.plot(vp, X_vp @ theta_opt, label='Predictor')
plt.grid()
plt.axis('square')
plt.xlim(0, 1)
plt.ylim(0, 1)
plt.xlabel(r'$x$')
plt.ylabel(r'$y$')
plt.title("Polynomial predictor")
# Finish the degree-5 predictor figure started just above.
plt.legend()
plt.show()

# ---------------------------------------------------------------------------
# Degree-50 polynomial fit (d = 51): heavily over-parameterized, so the
# unregularized least-squares fit tracks the samples too closely.
# ---------------------------------------------------------------------------
n = len(x)
d = 51
# Vandermonde matrix with columns x^0 ... x^50; replaces the original
# element-wise fill loop.
X = np.vander(x, d, increasing=True)
theta_opt = np.linalg.lstsq(X, y, rcond=None)[0]
residual = y - X @ theta_opt
print(f'Optimal theta: {theta_opt}')
print(f'RMSE: {np.linalg.norm(residual)/np.sqrt(n)}')

# Dense evaluation grid for plotting the predictor.
vp = np.linspace(0, 1, 100)
X_vp = np.vander(vp, d, increasing=True)
plt.figure(figsize=(6, 6), dpi=100)
plt.plot(x, y, 'o', alpha=0.5, label='Raw data')
plt.plot(vp, X_vp @ theta_opt, label='Predictor')
plt.grid()
plt.axis('square')
plt.xlim(0, 1)
plt.ylim(0, 1)
plt.xlabel(r'$x$')
plt.ylabel(r'$y$')
plt.title("Polynomial predictor")
plt.legend()
plt.show()

# ---------------------------------------------------------------------------
# Degree-50 fit with ridge regularization: solve the stacked least-squares
# problem || [X; sqrt(gamma) I] theta - [y; 0] ||^2, which shrinks the
# coefficients and smooths the high-degree predictor.
# ---------------------------------------------------------------------------
n = len(x)
d = 51
X = np.vander(x, d, increasing=True)
gamma = 1.0e-7
X_tilde = np.vstack((X, np.sqrt(gamma) * np.eye(d)))
y_tilde = np.hstack((y, np.zeros(d)))
theta_opt = np.linalg.lstsq(X_tilde, y_tilde, rcond=None)[0]
# The residual is measured against the *original* (unstacked) problem.
residual = y - X @ theta_opt
print(f'Optimal theta: {theta_opt}')
print(f'RMSE: {np.linalg.norm(residual)/np.sqrt(n)}')

vp = np.linspace(0, 1, 100)
X_vp = np.vander(vp, d, increasing=True)
plt.figure(figsize=(6, 6), dpi=100)
plt.plot(x, y, 'o', alpha=0.5, label='Raw data')
plt.plot(vp, X_vp @ theta_opt, label='Predictor')
plt.grid()
plt.axis('square')
plt.xlim(0, 1)
plt.ylim(0, 1)
plt.xlabel(r'$x$')
plt.ylabel(r'$y$')
plt.title("Polynomial predictor")
plt.legend()
plt.show()