Question
Q1: Create a multiclass classifier using One-vs-Rest strategy with the provided SGD binary classifier.
Use Python code:
# Importing dependencies *****************************************************
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# ****************************************************************************
# Implementing an SGD learning algorithm
# An object-oriented SGD class
class SGD(object):
    """Adaline linear classifier trained by stochastic gradient descent.

    Parameters
    ----------
    eta : float
        Learning rate (between 0.0 and 1.0).
    n_iter : int
        Number of passes (epochs) over the training set.
    shuffle : bool
        If True, shuffle the training data every epoch to avoid cycles.
    random_state : int or None
        Seed used for weight initialization and shuffling.

    Attributes
    ----------
    w_ : 1d-array   -- weights after fitting
    b_ : float      -- bias after fitting
    losses_ : list  -- mean squared error per epoch
    """

    # Initialization
    def __init__(self, eta=0.01, n_iter=20, shuffle=True, random_state=None):
        self.eta = eta
        self.n_iter = n_iter
        self.w_initialized = False
        self.shuffle = shuffle
        self.random_state = random_state

    # Training
    def fit(self, X, y):
        """Fit X (n_samples, n_features) against targets y; returns self."""
        self._initialize_weights(X.shape[1])
        self.losses_ = []
        for i in range(self.n_iter):
            if self.shuffle:
                X, y = self._shuffle(X, y)
            losses = []
            # One SGD update per training sample
            for xi, target in zip(X, y):
                losses.append(self._update_weights(xi, target))
            avg_loss = np.mean(losses)
            self.losses_.append(avg_loss)
        return self

    # Fit training data without reinitializing the weights
    def partial_fit(self, X, y):
        """Incrementally fit on new data, keeping any existing weights."""
        if not self.w_initialized:
            self._initialize_weights(X.shape[1])
        if y.ravel().shape[0] > 1:
            for xi, target in zip(X, y):
                self._update_weights(xi, target)
        else:
            # Single sample: update once directly
            self._update_weights(X, y)
        return self

    # Shuffle training data
    def _shuffle(self, X, y):
        r = self.rgen.permutation(len(y))
        return X[r], y[r]

    # Initialize weights to small random numbers
    def _initialize_weights(self, m):
        self.rgen = np.random.RandomState(self.random_state)
        self.w_ = self.rgen.normal(loc=0.0, scale=0.01, size=m)
        # FIX: np.float_ was removed in NumPy 2.0 and raises AttributeError
        # there; np.float64 is the documented, behavior-identical replacement.
        self.b_ = np.float64(0.)
        self.w_initialized = True

    # Apply Adaline learning rule to update the weights
    def _update_weights(self, xi, target):
        output = self.activation(self.net_input(xi))
        error = (target - output)
        # Gradient of the squared error w.r.t. weights and bias
        self.w_ += self.eta * 2.0 * xi * (error)
        self.b_ += self.eta * 2.0 * error
        loss = error**2
        return loss

    def net_input(self, X):
        # Calculate net input
        return np.dot(X, self.w_) + self.b_

    def activation(self, X):
        """Compute linear activation (identity function for Adaline)."""
        return X

    def predict(self, X):
        # Return class label after unit step
        # NOTE(review): the script below encodes One-vs-Rest targets as
        # +1/-1, for which a 0.0 threshold (or an argmax over raw
        # activations) is the matching decision rule -- confirm intent.
        return np.where(self.activation(self.net_input(X)) >= 0.5, 1, 0)


# _____________________________________________________________________________
# Create a multiclass classifier
# ****************************************************************************
# Reading in the Iris Data
df = pd.read_csv('https://archive.ics.uci.edu/ml/'
                 'machine-learning-databases/iris/iris.data',
                 header=None)
# Plotting Iris Data
print("Data regarding Iris DataSet (Setosa, Versicolor and Virginica) ")
# All 150 samples: rows 0-49 setosa, 50-99 versicolor, 100-149 virginica
y = df.iloc[0:150, 4].values

# Convert the class labels to one binary (+1 / -1) target vector per class;
# each vector is the training target for one One-vs-Rest classifier.
class1 = np.where(y == 'Iris-setosa', 1, -1)
class2 = np.where(y == 'Iris-versicolor', 1, -1)
class3 = np.where(y == 'Iris-virginica', 1, -1)

# Extract sepal length and petal length
X = df.iloc[0:150, [0, 2]].values

# Plot Data
plt.scatter(X[:50, 0], X[:50, 1], color='purple', marker='o', label='setosa')
plt.scatter(X[50:100, 0], X[50:100, 1], color='green', marker='x', label='versicolor')
plt.scatter(X[100:150, 0], X[100:150, 1], color='red', marker='v', label='virginica')
plt.xlabel('Sepal Length [cm]')
plt.ylabel('Petal Length [cm]')
plt.legend(loc='upper left')
# show plot
plt.show()
# ***************************************************************

# FIX: the original script built the three binary target vectors but never
# trained anything, so the One-vs-Rest multiclass classifier the question
# asks for was missing.  Train one SGD binary classifier per class and
# predict the class whose classifier yields the highest activation (argmax).

# Standardize the two features so SGD converges reliably
X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

# One binary classifier per class (One-vs-Rest)
ovr_targets = [class1, class2, class3]
ovr_classifiers = [SGD(eta=0.01, n_iter=20, random_state=1).fit(X_std, target)
                   for target in ovr_targets]

# Multiclass decision: argmax over each classifier's continuous score
ovr_scores = np.column_stack([clf.activation(clf.net_input(X_std))
                              for clf in ovr_classifiers])
y_pred = np.argmax(ovr_scores, axis=1)

# True class indices follow the dataset's row order used above:
# 0 = setosa, 1 = versicolor, 2 = virginica
y_true = np.repeat(np.arange(3), 50)
accuracy = np.mean(y_pred == y_true)
print('One-vs-Rest training accuracy: %.3f' % accuracy)
Step by Step Solution
There are 3 Steps involved in it
Step: 1
Get Instant Access to Expert-Tailored Solutions
See step-by-step solutions with expert insights and AI powered tools for academic success
Step: 2
Step: 3
Ace Your Homework with AI
Get the answers you need in no time with our AI-driven, step-by-step assistance
Get Started