Logistic Regression - Multinomial#
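This notebook trains a multinomial (softmax) logistic regression classifier on the MNIST handwritten-digit images. In the standard form of this model (notation chosen here to match the $\beta$ used in the heat-map section below), the probability of digit class $k$ given the flattened pixel vector $x$ is

$$P(y=k \mid x) \;=\; \frac{\exp\!\big(\beta_0^{(k)} + \sum_j \beta_j^{(k)} x_j\big)}{\sum_{c=0}^{9} \exp\!\big(\beta_0^{(c)} + \sum_j \beta_j^{(c)} x_j\big)},$$

so each class gets its own vector of pixel weights $\beta^{(k)}$ and intercept $\beta_0^{(k)}$.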
import numpy as np
import sklearn
import gzip
from matplotlib import pyplot as plt
# These are some parameters to make figures nice (and big)
params = {'legend.fontsize': 'x-large',
'figure.figsize': (16, 8),
'axes.labelsize': 'x-large',
'axes.titlesize':'x-large',
'xtick.labelsize':'x-large',
'ytick.labelsize':'x-large'}
plt.rcParams.update(params)
Data Preprocessing#
Read Data#
import urllib.request
urllib.request.urlretrieve("https://github.com/cdds-uiuc/simles-book/raw/main/content/DeepLearn/t10k-images-idx3-ubyte.gz", "t10k-images-idx3-ubyte.gz")
urllib.request.urlretrieve("https://github.com/cdds-uiuc/simles-book/raw/main/content/DeepLearn/t10k-labels-idx1-ubyte.gz", "t10k-labels-idx1-ubyte.gz")
urllib.request.urlretrieve("https://github.com/cdds-uiuc/simles-book/raw/main/content/DeepLearn/train-images-idx3-ubyte.gz", "train-images-idx3-ubyte.gz")
urllib.request.urlretrieve("https://github.com/cdds-uiuc/simles-book/raw/main/content/DeepLearn/train-labels-idx1-ubyte.gz", "train-labels-idx1-ubyte.gz")
# Training data
# images
f = gzip.open('train-images-idx3-ubyte.gz', 'r')
image_size = 28
n_images_train = 50000
f.read(16)  # skip the 16-byte IDX header
buf = f.read(image_size * image_size * n_images_train)
data_train = np.frombuffer(buf, dtype=np.uint8).astype(np.float32)
data_train = data_train.reshape(n_images_train, image_size, image_size)
data_train = data_train/255  # scale pixel values to [0, 1]
# labels
f = gzip.open('train-labels-idx1-ubyte.gz', 'r')
f.read(8)  # skip the 8-byte IDX header
labels_train = np.zeros(n_images_train)
for i in range(0, n_images_train):
    buf = f.read(1)  # one byte per label
    labels_train[i] = np.frombuffer(buf, dtype=np.uint8).astype(np.int64)[0]
labels_train = labels_train.astype(int)
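The loop above reads one label byte at a time; an equivalent vectorized read (a sketch, assuming the same IDX layout of an 8-byte header followed by one byte per label) would be:
# Vectorized alternative (sketch): read all label bytes in one call
f = gzip.open('train-labels-idx1-ubyte.gz', 'r')
f.read(8)  # skip the 8-byte IDX header
buf = f.read(n_images_train)
labels_train_vec = np.frombuffer(buf, dtype=np.uint8).astype(int)
# should agree with the loop above: np.array_equal(labels_train_vec, labels_train) -> True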
# Test data
#images
f = gzip.open('t10k-images-idx3-ubyte.gz', 'r')
image_size = 28
n_images_test = 10000
f.read(16)  # skip the 16-byte IDX header
buf = f.read(image_size * image_size * n_images_test)
data_test = np.frombuffer(buf, dtype=np.uint8).astype(np.float32)
data_test = data_test.reshape(n_images_test, image_size, image_size)
data_test = data_test/255  # scale pixel values to [0, 1]
#labels
f = gzip.open('t10k-labels-idx1-ubyte.gz', 'r')
f.read(8)  # skip the 8-byte IDX header
labels_test = np.zeros(n_images_test)
for i in range(0, n_images_test):
    buf = f.read(1)  # one byte per label
    labels_test[i] = np.frombuffer(buf, dtype=np.uint8).astype(np.int64)[0]
labels_test = labels_test.astype(int)
Inspect Raw Data#
Data shape#
# let's look at the data shape
print('training data')
print(data_train.shape)
print(labels_train.shape)
print(' ')
print('test data')
print(data_test.shape)
print(labels_test.shape)
print(' ')
print(labels_train[0:5])
training data
(50000, 28, 28)
(50000,)
test data
(10000, 28, 28)
(10000,)
[5 0 4 1 9]
Plot#
# show a random training digit with its label
plt.figure(figsize=[3, 3])
ind = np.random.randint(0, n_images_train)
plt.imshow(data_train[ind], cmap=plt.get_cmap('Greys'));
plt.title(labels_train[ind]);
plt.colorbar();
Restructure raw data into input data#
# flatten each 28x28 image into a 784-long feature vector
X_train = data_train.squeeze().reshape(n_images_train, 28*28)
y_train = labels_train
print(X_train.shape)
print(y_train.shape)
(50000, 784)
(50000,)
X_test = data_test.squeeze().reshape(n_images_test, 28*28)
y_test = labels_test
print(X_test.shape)
print(y_test.shape)
(10000, 784)
(10000,)
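As a quick sanity check (a sketch added here, not part of the original preprocessing), reshaping a flattened row back to 28x28 should reproduce the raw image array:
# Sanity check (sketch): flattening then reshaping recovers the original image
print(np.array_equal(X_train[0].reshape(28, 28), data_train[0]))  # expected: True
print(np.array_equal(X_test[0].reshape(28, 28), data_test[0]))    # expected: True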
Logistic Regression#
Train model#
from sklearn import linear_model

# Define architecture (hyperparameters)
logreg_obj = linear_model.LogisticRegression(max_iter=5000)

# fit model (learn parameters)
logreg = logreg_obj.fit(X_train, y_train)

# make predictions on the test set
yhat_test = logreg.predict(X_test)
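Since this is a multinomial fit over all ten digits, the learned parameters are one weight vector and one intercept per class; a quick inspection (sketch) confirms the shapes used later for the heat maps:
# Inspect the learned parameters (sketch): one row of pixel weights per digit class
print(logreg.coef_.shape)       # expected: (10, 784)
print(logreg.intercept_.shape)  # expected: (10,)
print(logreg.classes_)          # expected: [0 1 2 3 4 5 6 7 8 9]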
Make Predictions#
plt.figure(figsize=[4,4])
ind=np.random.randint(0,n_images_test)
plt.imshow(data_test[ind],cmap=plt.get_cmap('Greys'));
plt.title('truth= '+str(y_test[ind])+' prediction='+str(yhat_test[ind]));
plt.colorbar();
plt.tight_layout()
Confusion Matrix#
from sklearn.metrics import confusion_matrix
from sklearn.metrics import ConfusionMatrixDisplay

# confusion matrix, normalized over the true labels
cm = confusion_matrix(y_true=y_test, y_pred=yhat_test)
ConfusionMatrixDisplay.from_predictions(y_test, yhat_test, cmap=plt.cm.Blues, normalize='true')
score = sklearn.metrics.accuracy_score(y_test, yhat_test)
print((1-score)*100)  # misclassification rate (%)
7.540000000000003
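Beyond the overall error rate, a per-class breakdown shows which digits are classified well and which are confused; a sketch using sklearn's classification_report:
# Per-class precision/recall (sketch), complementing the confusion matrix above
from sklearn.metrics import classification_report
print(classification_report(y_test, yhat_test))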
Visualize Heat Map#
label_ind = 0  # digit class whose weights we visualize
# ind = np.random.randint(0, n_images_test)  # pick a random test image, or...
ind = 3818  # ...use a fixed example

# per-pixel weights for this class, reshaped to the image grid
sensitivity = logreg.coef_[label_ind, :].reshape(28, 28)
sensitivity = np.flip(sensitivity, 0)  # flip vertically so pcolor matches the image orientation

# element-wise contribution of each pixel to the linear score
heatmap = X_test[ind, :]*logreg.coef_[label_ind, :]
heatmap = heatmap.reshape(28, 28)
heatmap = np.flip(heatmap, 0)
plt.figure(figsize=[9,3])
plt.subplot(1,3,1)
plt.pcolor(sensitivity)
plt.colorbar()
plt.set_cmap('seismic')
plt.clim(-1.5,1.5)
plt.xticks([])
plt.yticks([])
plt.title(r'$\beta_j$'+' for ' + str(label_ind))
plt.subplot(1,3,2)
plt.pcolor(np.flip(data_test[ind],0))
plt.colorbar()
plt.set_cmap('Greys')
plt.clim(0,1)
plt.xticks([])
plt.yticks([])
plt.title(r'$X_j$')
plt.subplot(1,3,3)
plt.pcolor(heatmap)
plt.colorbar()
plt.set_cmap('seismic')
plt.clim(-1,1)
plt.xticks([])
plt.yticks([])
plt.tight_layout()
plt.title(r'$\beta_j x_j$')
plt.xticks(ticks=[15],labels=[r'$\beta_0+\sum_j \beta_j x_j=$'+str(np.round(np.sum(np.sum(heatmap))+logreg.intercept_[0],2))]);
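The value annotated on the third panel is the linear score $\beta_0+\sum_j \beta_j x_j$ for class label_ind on this test image; as a cross-check (a sketch, not part of the original notebook), it should match the corresponding entry of logreg.decision_function:
# Cross-check (sketch): the heat-map sum equals the model's linear score for this class
score_manual = np.sum(X_test[ind, :]*logreg.coef_[label_ind, :]) + logreg.intercept_[label_ind]
score_sklearn = logreg.decision_function(X_test[ind:ind+1])[0, label_ind]
print(np.round(score_manual, 2), np.round(score_sklearn, 2))  # the two should agree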