
Question

* Retrain your model using 1cycle scheduling and see if it improves training speed and model accuracy.
import math
import numpy as np
import matplotlib.pyplot as plt
from tensorflow import keras

K = keras.backend

class ExponentialLearningRate(keras.callbacks.Callback):
    def __init__(self, factor):
        self.factor = factor
        self.rates = []
        self.losses = []

    def on_batch_end(self, batch, logs):
        # Record the current learning rate and loss, then scale the rate up.
        self.rates.append(K.get_value(self.model.optimizer.learning_rate))
        self.losses.append(logs["loss"])
        K.set_value(self.model.optimizer.learning_rate,
                    self.model.optimizer.learning_rate * self.factor)

def find_learning_rate(model, X, y, epochs=1, batch_size=32,
                       min_rate=10**-5, max_rate=10):
    # Sweep the learning rate exponentially from min_rate to max_rate,
    # then restore the model's original weights and learning rate.
    init_weights = model.get_weights()
    iterations = math.ceil(len(X) / batch_size) * epochs
    factor = np.exp(np.log(max_rate / min_rate) / iterations)
    init_lr = K.get_value(model.optimizer.learning_rate)
    K.set_value(model.optimizer.learning_rate, min_rate)
    exp_lr = ExponentialLearningRate(factor)
    history = model.fit(X, y, epochs=epochs, batch_size=batch_size,
                        callbacks=[exp_lr])
    K.set_value(model.optimizer.learning_rate, init_lr)
    model.set_weights(init_weights)
    return exp_lr.rates, exp_lr.losses
def plot_lr_vs_loss(rates, losses):
    plt.plot(rates, losses)
    plt.gca().set_xscale('log')
    plt.hlines(min(losses), min(rates), max(rates))
    plt.axis([min(rates), max(rates), min(losses), (losses[0] + min(losses)) / 2])
    plt.xlabel("Learning rate")
    plt.ylabel("Loss")
class OneCycleScheduler(keras.callbacks.Callback):
    def __init__(self, iterations, max_rate, start_rate=None,
                 last_iterations=None, last_rate=None):
        self.iterations = iterations
        self.max_rate = max_rate
        self.start_rate = start_rate or max_rate / 10
        self.last_iterations = last_iterations or iterations // 10 + 1
        self.half_iteration = (iterations - self.last_iterations) // 2
        self.last_rate = last_rate or self.start_rate / 1000
        self.iteration = 0

    def _interpolate(self, iter1, iter2, rate1, rate2):
        return ((rate2 - rate1) * (self.iteration - iter1)
                / (iter2 - iter1) + rate1)

    def on_batch_begin(self, batch, logs):
        if self.iteration < self.half_iteration:
            # Phase 1: ramp the learning rate up from start_rate to max_rate.
            rate = self._interpolate(0, self.half_iteration,
                                     self.start_rate, self.max_rate)
        elif self.iteration < 2 * self.half_iteration:
            # Phase 2: ramp it back down from max_rate to start_rate.
            rate = self._interpolate(self.half_iteration, 2 * self.half_iteration,
                                     self.max_rate, self.start_rate)
        else:
            # Final phase: drop the rate further, down to last_rate.
            rate = self._interpolate(2 * self.half_iteration, self.iterations,
                                     self.start_rate, self.last_rate)
        self.iteration += 1
        K.set_value(self.model.optimizer.learning_rate, rate)
# Plot of a power-scheduling decay curve; X_train and n_epochs are assumed
# to be defined earlier.
learning_rate = 0.01
decay = 1e-4
batch_size = 32
n_steps_per_epoch = math.ceil(len(X_train) / batch_size)
epochs = np.arange(n_epochs)
lrs = learning_rate / (1 + decay * epochs * n_steps_per_epoch)
plt.plot(epochs, lrs, "o-")
plt.axis([0, n_epochs - 1, 0, 0.01])
plt.xlabel("Epoch")
plt.ylabel("Learning Rate")
plt.title("Power Scheduling", fontsize=14)
plt.grid(True)
plt.show()

... is that correct code for the above question?
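
For reference, below is a minimal sketch of how the callbacks defined above could be wired into an actual retraining run, so the effect of 1cycle scheduling on training speed and accuracy can be compared against a baseline. The dataset (Fashion MNIST), the network architecture, the batch size, the number of epochs, and the max_rate of 0.05 are all illustrative assumptions, not part of the original question; in practice max_rate would be read off the plot_lr_vs_loss curve, a bit below the point where the loss starts to shoot up.

import math
from tensorflow import keras

# Illustrative data and architecture (assumptions, not from the original question).
(X_train_full, y_train_full), _ = keras.datasets.fashion_mnist.load_data()
X_train_full = X_train_full / 255.0
X_valid, X_train = X_train_full[:5000], X_train_full[5000:]
y_valid, y_train = y_train_full[:5000], y_train_full[5000:]

def build_model():
    return keras.models.Sequential([
        keras.layers.Flatten(input_shape=[28, 28]),
        keras.layers.Dense(300, activation="relu"),
        keras.layers.Dense(100, activation="relu"),
        keras.layers.Dense(10, activation="softmax"),
    ])

# Step 1: sweep the learning rate and pick max_rate from the loss curve.
model = build_model()
model.compile(loss="sparse_categorical_crossentropy",
              optimizer=keras.optimizers.SGD(learning_rate=1e-3),
              metrics=["accuracy"])
rates, losses = find_learning_rate(model, X_train, y_train, batch_size=128)
plot_lr_vs_loss(rates, losses)
plt.show()

# Step 2: retrain from scratch with 1cycle scheduling.
n_epochs = 25
batch_size = 128
model = build_model()
model.compile(loss="sparse_categorical_crossentropy",
              optimizer=keras.optimizers.SGD(learning_rate=1e-3),
              metrics=["accuracy"])
onecycle = OneCycleScheduler(math.ceil(len(X_train) / batch_size) * n_epochs,
                             max_rate=0.05)  # assumed value; read it off the plot
history = model.fit(X_train, y_train, epochs=n_epochs, batch_size=batch_size,
                    validation_data=(X_valid, y_valid),
                    callbacks=[onecycle])

Comparing this run against a baseline trained with a constant learning rate (same architecture, same number of epochs) is what lets you judge whether 1cycle actually improves training speed and final accuracy. Note also that the last snippet in the question plots a power-scheduling curve rather than anything 1cycle-specific, so it is not needed for this exercise.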

