import numpy as np
import matplotlib.pyplot as plt
# '%matplotlib inline' is an IPython magic, not Python — it is a
# SyntaxError in a plain .py script, so it is kept only as a comment
# for anyone running this inside a notebook.
# %matplotlib inline
plt.style.use('default')
# The 'seaborn' style name was removed in matplotlib >= 3.6 (renamed to
# 'seaborn-v0_8'); fall back so the script runs on both old and new versions.
try:
    plt.style.use('seaborn')
except OSError:
    plt.style.use('seaborn-v0_8')
def huber(res, delta):
    """Huber loss of a regression residual.

    Quadratic (res**2 / 2) for |res| <= delta, linear
    (delta*|res| - delta**2 / 2) beyond that, so the two pieces join
    continuously at |res| == delta. Works elementwise on arrays.
    """
    magnitude = abs(res)
    quadratic = magnitude ** 2 / 2
    linear = delta * magnitude - delta ** 2 / 2
    return np.where(magnitude <= delta, quadratic, linear)
# Figure 1: common regression losses as a function of the residual y - f(x).
plt.figure(figsize=(8, 5))
plt.axis([-4.2, 4.2, -0.2, 8.2])
x = np.linspace(-4, 4, 100)
plt.plot(x, x ** 2, label="squared loss", lw=2)
plt.plot(x, np.abs(x), label="absolute loss", lw=2)
# Raw strings: '\d' in "$\delta$" is an invalid escape sequence
# (SyntaxWarning on Python 3.12+); r"..." keeps the bytes identical.
plt.plot(x, huber(x, 3), label=r"huber loss: $\delta$={}".format(3), lw=2, linestyle="-.")
plt.plot(x, huber(x, 1.5), label=r"huber loss: $\delta$={}".format(1.5), lw=2, linestyle="--")
plt.legend(loc='upper center', fontsize=12)
plt.xlabel(r"$y-f(x)$", fontsize=13)
plt.ylabel(r"$Loss$", fontsize=13)
plt.title("Loss Function for Regression", fontsize=15)
# Removed notebook output artifact "Text(0.5,1,'Loss Function for
# Regression')" — it is a cell-output repr, a NameError in a script.
# Figure 2: schematic of the logistic loss against the margin y*f(x),
# with hand-drawn axis labels/arrows and all spines/ticks hidden.
fig = plt.figure(figsize=(8, 5))
ax = fig.add_subplot(111)
ax.axis([-5, 4, -0.2, 6])
x = np.linspace(-8, 8, 100)
# log base 2 so the loss equals exactly 1 at margin 0.
ax.plot(x, np.log2(1 + np.exp(-x)), lw=2)
ax.set_xticks([])
ax.set_yticks([])
# Raw strings: '\c' in '$y \cdot f(x)$' is an invalid escape sequence
# (SyntaxWarning on Python 3.12+); r'...' keeps the bytes identical.
ax.text(-1.5, 0.3, r'$y \cdot f(x)$', size=13, horizontalalignment="center", verticalalignment="center")
ax.annotate("", xy=(0.5, 0.3), xytext=(-1, 0.3), arrowprops=dict(facecolor='k', shrink=0.05, width=2, headwidth=7))
ax.text(-4, 4, r'$loss$', rotation=90, size=15, horizontalalignment="center", verticalalignment="center")
ax.annotate("", xy=(-4, 2), xytext=(-4, 3.5), arrowprops=dict(facecolor='k', shrink=0.05, width=2, headwidth=7))
# Hide the full frame so only the arrows suggest axes.
for side in ('top', 'right', 'left', 'bottom'):
    ax.spines[side].set_visible(False)
def huber_classification(margin):
    """Modified Huber loss as a function of the margin y*f(x).

    Piecewise:
        -4 * margin        for margin < -1
        (1 - margin) ** 2  for -1 <= margin < 1
        0                  for margin >= 1
    The pieces join continuously at margin == -1 (both give 4).
    Works elementwise on arrays.
    """
    quad_or_zero = np.where(margin >= 1, 0, (1 - margin) ** 2)
    return np.where(margin < -1, -4 * margin, quad_or_zero)
# Figure 3: common classification losses versus the margin y*f(x).
plt.figure(figsize=(8, 5))
plt.axis([-4.2, 4.2, -0.2, 5])
x = np.linspace(-4, 4, 100)
# Step function drawn explicitly from its corner points.
plt.plot([-4, 0, 0, 4], [1, 1, 0, 0], label="zero one loss", lw=2)
plt.plot(x, np.log2(1 + np.exp(-x)), label="logistic loss", lw=2)
plt.plot(x, np.where(x >= 1, 0, 1 - x), label="hinge loss", lw=2)
plt.plot(x, np.exp(-x), label="exponential loss", lw=2)
plt.plot(x, huber_classification(x), label="huber loss", lw=2, linestyle="--", color="darkorange")
plt.legend(loc='best', fontsize=13)
# Raw string: '\c' is an invalid escape sequence (SyntaxWarning on 3.12+).
plt.xlabel(r"$y \cdot f(x)$", fontsize=14)
plt.ylabel(r"$Loss$", fontsize=14)
plt.title("Loss Function for Classification", fontsize=15)
# Removed notebook output artifact "Text(0.5,1,'Loss Function for
# Classification')" — a cell-output repr, a NameError in a script.
# Figure 4: same classification losses on a wide y-range to show how the
# exponential loss explodes for large negative margins.
plt.figure(figsize=(8, 5))
plt.axis([-8, 8, -1, 100])
x = np.linspace(-8, 8, 100)
plt.plot(x, np.log2(1 + np.exp(-x)), label="logistic loss", lw=2)
plt.plot(x, np.exp(-x), label="exponential loss", lw=2)
plt.plot(x, huber_classification(x), label="huber loss", lw=2, color="darkorange")
plt.legend(loc='best', fontsize=13)
# Raw string: '\c' is an invalid escape sequence (SyntaxWarning on 3.12+).
plt.xlabel(r"$y \cdot f(x)$", fontsize=14)
plt.ylabel(r"$Loss$", fontsize=14)
plt.title("Loss Function for Classification", fontsize=15)
# Arrow pointing toward the exponential curve's blow-up in the top-left.
plt.annotate("", xy=(-7.8, 98), xytext=(-6.5, 90), arrowprops=dict(facecolor='k', width=4, headwidth=10))
# Removed notebook output artifact "Text(-6.5,90,'')" — a cell-output
# repr, a NameError in a script.