#!/usr/bin/env python
# coding: utf-8
#
# # Logistic Regression Examples
#
# ## libraries
# In[1]:
import numpy as np
import matplotlib.pyplot as plt
# In[2]:
import spkit
print(spkit.__version__)
# In[3]:
from spkit.ml import LogisticRegression
# ## Binary class
# In[4]:
# Build a toy binary-class dataset: 300 points from a 2D standard normal,
# random 0/1 labels, with class-0 points shifted so the classes separate.
n_samples = 300
np.random.seed(1)  # fixed seed so the figures are reproducible
X = np.random.randn(n_samples, 2)
y = np.random.randint(0, 2, n_samples)
y.sort()  # group labels so boolean masks select contiguous blocks
X[y == 0] += 2  # push class 0 away from class 1 a little
print(X.shape, y.shape)
for label, style in ((0, '.b'), (1, '.r')):
    plt.plot(X[y == label, 0], X[y == label, 1], style)
plt.show()
# In[5]:
# Fit binary logistic regression (alpha = learning rate) and report
# training accuracy and log-loss.
clf = LogisticRegression(alpha=0.1)
clf.fit(X, y, max_itr=1000)
y_pred = clf.predict(X)
y_prob = clf.predict_proba(X)
print('Accuracy : ', np.mean(y_pred == y))
print('Loss : ', clf.Loss(y, y_prob))
# In[6]:
# Diagnostics for the binary fit: learning curve, decision boundary,
# and two views of the learned weights, in a 2x2 grid.
plt.figure(figsize=(12, 7))
axes = [plt.subplot(2, 2, k) for k in (1, 2, 3, 4)]
clf.plot_Lcurve(ax=axes[0])
clf.plot_boundries(X, y, ax=axes[1])
clf.plot_weights(ax=axes[2])
clf.plot_weights2(ax=axes[3], grid=False)
# ## Multiclass with polynomial feature
# In[7]:
# Three-class toy dataset: shift class 0 upward and class 2 leftward
# so the three clouds are (roughly) separable.
N = 300
X = np.random.randn(N, 2)
y = np.random.randint(0, 3, N)
y.sort()  # contiguous labels for easy mask-based shifting
X[y == 0, 1] += 3
X[y == 2, 0] -= 3
print(X.shape, y.shape)
for cls, style in enumerate(('.b', '.r', '.g')):
    plt.plot(X[y == cls, 0], X[y == cls, 1], style)
plt.show()
# In[8]:
# Multiclass fit with degree-3 polynomial features on normalized inputs;
# loss uses one-hot targets since predict_proba is per-class here.
clf = LogisticRegression(alpha=0.1, polyfit=True, degree=3, lambd=0,
                         FeatureNormalize=True)
clf.fit(X, y, max_itr=1000)
y_pred = clf.predict(X)
y_prob = clf.predict_proba(X)
print(clf)
print('')
print('Accuracy : ', np.mean(y_pred == y))
print('Loss : ', clf.Loss(clf.oneHot(y), y_prob))
# In[9]:
# Diagnostics for the multiclass fit, same 2x2 layout as the binary case.
plt.figure(figsize=(15, 7))
axes = [plt.subplot(2, 2, k) for k in (1, 2, 3, 4)]
clf.plot_Lcurve(ax=axes[0])
clf.plot_boundries(X, y, ax=axes[1])
clf.plot_weights(ax=axes[2])
clf.plot_weights2(ax=axes[3], grid=True)
# ## Iris Dataset
# In[10]:
from sklearn import datasets
from sklearn.model_selection import train_test_split
# In[11]:
# Load the Iris dataset and hold out 30% of the rows for testing.
data = datasets.load_iris()
X = data.data
y = data.target
# random_state pinned so the train/test split — and therefore the printed
# accuracies below — are reproducible across runs of this example.
Xt, Xs, yt, ys = train_test_split(X, y, test_size=0.3, random_state=42)
print(X.shape, y.shape, Xt.shape, yt.shape, Xs.shape, ys.shape)
# ### Without polynomial features (polyfit=False below; flip to True to compare)
# In[12]:
# Train on the raw Iris features (no polynomial expansion, no
# normalization) and compare train vs. held-out performance.
clf = LogisticRegression(alpha=0.1, polyfit=False, degree=3, lambd=0,
                         FeatureNormalize=False)
clf.fit(Xt, yt, max_itr=1000)
yt_pred = clf.predict(Xt)
yt_prob = clf.predict_proba(Xt)
ys_pred = clf.predict(Xs)
ys_prob = clf.predict_proba(Xs)
print(clf)
print('')
print('Training Accuracy : ', np.mean(yt_pred == yt))
print('Testing Accuracy : ', np.mean(ys_pred == ys))
print('Training Loss : ', clf.Loss(clf.oneHot(yt), yt_prob))
print('Testing Loss : ', clf.Loss(clf.oneHot(ys), ys_prob))
# In[13]:
# Diagnostics for the Iris fit. Only three panels: with 4-D inputs there
# is no 2-D decision-boundary plot, so slot 222 is left empty.
plt.figure(figsize=(15, 7))
clf.plot_Lcurve(ax=plt.subplot(221))
clf.plot_weights(ax=plt.subplot(223))
clf.plot_weights2(ax=plt.subplot(224), grid=True)
# In[ ]: