#!/usr/bin/env python
# coding: utf-8

# # Newton's Method
#
# and some experiments. In Cedric Villani's book, "Birth of a Theorem", he
# writes that Newton's method decays updates exponentially. I wanted to test
# this out.

# ### Imports

# In[1]:


import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d


# ### Newton's Method and Gradient

# In[2]:


def tangent(f, x, h=0.0001):
    """Approximate the derivative f'(x) with a forward difference.

    The previous version added 0.0001 to EVERY slope to dodge division by
    zero in Newton's update, which biased even perfectly good derivatives.
    Here the epsilon is substituted only when the finite difference is
    exactly zero: the update still never divides by zero, but accurate
    slopes stay unbiased.
    """
    slope = (f(x + h) - f(x)) / h
    if slope == 0:
        slope = 0.0001  # only nudge a numerically flat slope
    return slope


def newtonMethod(f, x0, tol=0.0001, maxIter=500):
    """Iterate Newton's update x <- x - f(x)/f'(x) starting from x0.

    Returns a tuple (x, numIter, roots):
      x       -- final iterate,
      numIter -- number of update steps actually taken,
      roots   -- every iterate visited, including x0.

    Stops as soon as |f(x)| < tol; blow-ups and cycles are therefore
    capped at maxIter steps.
    """
    x = x0
    roots = [x]
    numIter = 0
    for _ in range(maxIter):
        x = x - f(x) / tangent(f, x)
        numIter += 1
        roots.append(x)
        if abs(f(x)) < tol:
            break
    return x, numIter, roots


# ### Define function
#
# Modify the below cell to test on any custom function

# In[3]:


def f(x):
    return x**5 - 1  # change this line


# x = np.linspace(-20, 20, 100)  # use for functions defined on negatives
x = np.linspace(0.0001, 20, 100)  # positive-only domain (safe for np.log)
y = f(x)
plt.plot(x, y)


# ### Visualize roots
#
# Initialize any start point in the below cell. Note that there are cases
# when Newton's method will overshoot or get stuck in a cycle (among a bunch
# of other issues).
#
# - [Overshoot](https://en.wikipedia.org/wiki/Newton%27s_method#Overshoot)
# - [Bad starting points](https://en.wikipedia.org/wiki/Newton%27s_method#Bad_starting_points)
# - [Derivative issues](https://en.wikipedia.org/wiki/Newton%27s_method#Derivative_issues)

# In[4]:


start = 30
x = np.linspace(-start, start, 1000)
y = f(x)
plt.figure(figsize=(30, 20))
plt.plot(x, y)

# Mark every iterate of the method on the curve, labelled with its value.
roots = newtonMethod(f, start)[2]
for root in roots:
    plt.plot(root, f(root), label='root = ' + str(root), marker='o')
plt.legend()
plt.title('Newton Method for finding roots of f(x) = x^5 - 1')
plt.show()


# ### Root decay visualization
#
# Given a starting point, see how fast the root decays to the optimal
# solution. We have to make sure we pass values in the domain of the
# function chosen.

# In[5]:


start = 300
starts = np.linspace(-start, start, 10)  # use this for functions that take negative values.
# starts = np.linspace(0.0001, start, 10)  # use this for functions like np.log()

# One iterate-trajectory per starting point: roots[i] belongs to starts[i].
roots = [newtonMethod(f, s)[2] for s in starts]

fig = plt.figure(figsize=(30, 20))
ax = plt.axes(projection='3d')
# BUG FIX: the original triple loop paired EVERY trajectory with EVERY
# starting point (a Cartesian product), scattering len(starts) superimposed
# copies of each trajectory. zip keeps each start matched to its own
# sequence of iterates.
for s, trajectory in zip(starts, roots):
    for k, iterate in enumerate(trajectory):
        ax.scatter3D(s, iterate, k, c='r')
ax.set_xlabel('start')
ax.set_ylabel('root')
ax.set_zlabel('iteration')
plt.show()


# ### Convergence time visualization
#
# Check the number of iterations it takes to converge to the root. Blow-ups
# or loops are upper-bounded at 500.

# In[6]:


start = 3000
starts = np.linspace(-start, start, 1000)  # use this for functions that take negative values.
# starts = np.linspace(0.0001, start, 100)  # use this for functions like np.log()

# Iteration count (element [1] of the return tuple) for each starting point.
iters = [newtonMethod(f, s)[1] for s in starts]

plt.figure(figsize=(30, 20))
plt.plot(starts, iters, 'o')
plt.title('Number of iterations for different starting points')
plt.show()


# ### Functions I tried
#
# $$f(x) = \sin{(x)} + x - 1$$
#
# $$f(x) = x^5 - 1$$
#
# $$f(x) = 4\ln{(x)} - x$$