Plot multiple curves: training/validation accuracy and training/validation loss
Thu Jul 28 2022 14:30:48 GMT+0000 (Coordinated Universal Time)
Saved by @mnis00014
def plot_performance_acc(hist, hist_one, hist_two, hist_three, hist_four, hist_five):
    """Plot training and validation accuracy for six Adam training runs.

    Each argument is a Keras ``History`` object (exposes a ``.history`` dict
    and an ``.epoch`` list) from training with Adam at learning rates
    0.1, 0.01, 0.001, 0.0001, 0.00001 and 0.000001 respectively.
    # NOTE(review): the sixth learning rate is assumed to be 0.000001; the
    # original code labelled the fifth and sixth curves identically
    # ('0.00001') — confirm against the training configuration.

    Side effects: shows the figure and saves it as 'acc.png' (dpi=50).
    """
    plt.rcParams['figure.figsize'] = (20, 10)
    runs = (hist, hist_one, hist_two, hist_three, hist_four, hist_five)
    labels = ('Learning Rate: 0.1', 'Learning Rate: 0.01',
              'Learning Rate: 0.001', 'Learning Rate: 0.0001',
              'Learning Rate: 0.00001', 'Learning Rate: 0.000001')

    plt.subplot(1, 2, 1)  # row 1, col 2, index 1: training accuracy
    for run, label in zip(runs, labels):
        plt.plot(run.epoch, run.history['accuracy'], label=label)
    plt.xlabel('Epochs')
    plt.ylabel('Accuracy')
    plt.title('Training accuracy with Adam optimizer')
    plt.legend(loc='lower right')

    plt.subplot(1, 2, 2)  # row 1, col 2, index 2: validation accuracy
    for run, label in zip(runs, labels):
        plt.plot(run.epoch, run.history['val_accuracy'], label=label)
    plt.xlabel('Epochs')
    plt.ylabel('Accuracy')
    plt.title('Validation accuracy with Adam optimizer')
    plt.legend(loc='lower right')

    # pad must be passed by keyword: tight_layout(2) raises a TypeError on
    # matplotlib >= 3.6 (positional args were removed).
    plt.tight_layout(pad=2.0)
    fig1 = plt.gcf()
    plt.show()
    plt.draw()
    fig1.savefig('acc.png', dpi=50)
-----------------------------------------------------------------------------------
def plot_performance_loss(hist, hist_one, hist_two, hist_three, hist_four, hist_five):
    """Plot training and validation loss for six Adam training runs.

    Each argument is a Keras ``History`` object (exposes a ``.history`` dict
    and an ``.epoch`` list) from training with Adam at learning rates
    0.1, 0.01, 0.001, 0.0001, 0.00001 and 0.000001 respectively.
    # NOTE(review): the original signature named the last parameter
    # ``hist_05`` but the body referenced ``hist_five`` (a NameError);
    # renamed for consistency with plot_performance_acc.
    # NOTE(review): the sixth learning-rate label is assumed to be
    # 0.000001 (the original duplicated '0.00001') — confirm.

    Side effects: shows the figure and saves it as 'loss.png' (dpi=50).
    """
    plt.rcParams['figure.figsize'] = (20, 10)
    runs = (hist, hist_one, hist_two, hist_three, hist_four, hist_five)
    labels = ('Learning Rate: 0.1', 'Learning Rate: 0.01',
              'Learning Rate: 0.001', 'Learning Rate: 0.0001',
              'Learning Rate: 0.00001', 'Learning Rate: 0.000001')

    plt.subplot(1, 2, 1)  # row 1, col 2, index 1: training loss
    for run, label in zip(runs, labels):
        plt.plot(run.epoch, run.history['loss'], label=label)
    plt.xlabel('Epochs')
    plt.ylabel('Loss')
    plt.title('Training loss with Adam')
    plt.legend(loc='upper right')

    plt.subplot(1, 2, 2)  # row 1, col 2, index 2: validation loss
    for run, label in zip(runs, labels):
        # Fix: the original plotted the sixth curve from 'loss' here,
        # mixing a training curve into the validation panel.
        plt.plot(run.epoch, run.history['val_loss'], label=label)
    plt.xlabel('Epochs')
    plt.ylabel('Loss')
    plt.title('Validation loss with Adam')
    plt.legend(loc='upper right')

    # pad must be passed by keyword: tight_layout(2) raises a TypeError on
    # matplotlib >= 3.6 (positional args were removed).
    plt.tight_layout(pad=2.0)
    fig1 = plt.gcf()
    plt.show()
    plt.draw()
    fig1.savefig('loss.png', dpi=50)



Comments