
Whenever I re-run my model, the "Precision", "Recall", "Sensitivity" and "Specificity" metrics change their names: the first time it is "precision", the next run it becomes "precision_11", then "precision_12", and so on.

How can I solve this?

Here is the code:


model.compile(optimizer="sgd",
              loss="categorical_crossentropy",
              metrics=[keras.metrics.Precision(), keras.metrics.Recall(), keras.metrics.SpecificityAtSensitivity(0.5), keras.metrics.SensitivityAtSpecificity(0.5), 'accuracy'])
# fit the model
# Run the cell. It will take some time to execute
r = model.fit_generator(
  training_set,
  validation_data=test_set,
  epochs=5,
  steps_per_epoch=len(training_set),
  validation_steps=len(test_set)
)

Here is the output:

Epoch 1/5
164/164 [==============================] - 111s 675ms/step - loss: 5.4092 - precision_22: 0.7641 - recall_12: 0.7641 - specificity_at_sensitivity_7: 0.8196 - sensitivity_at_specificity_9: 0.8196 - accuracy: 0.7641 - val_loss: 1.8738 - val_precision_22: 0.7965 - val_recall_12: 0.7965 - val_specificity_at_sensitivity_7: 0.8622 - val_sensitivity_at_specificity_9: 0.8622 - val_accuracy: 0.7965
Epoch 2/5
164/164 [==============================] - 109s 665ms/step - loss: 1.4624 - precision_22: 0.8702 - recall_12: 0.8702 - specificity_at_sensitivity_7: 0.9192 - sensitivity_at_specificity_9: 0.9192 - accuracy: 0.8702 - val_loss: 3.0408 - val_precision_22: 0.7340 - val_recall_12: 0.7340 - val_specificity_at_sensitivity_7: 0.8061 - val_sensitivity_at_specificity_9: 0.8061 - val_accuracy: 0.7340
Epoch 3/5
164/164 [==============================] - 110s 670ms/step - loss: 1.1008 - precision_22: 0.8882 - recall_12: 0.8882 - specificity_at_sensitivity_7: 0.9360 - sensitivity_at_specificity_9: 0.9360 - accuracy: 0.8882 - val_loss: 0.8237 - val_precision_22: 0.8830 - val_recall_12: 0.8830 - val_specificity_at_sensitivity_7: 0.9391 - val_sensitivity_at_specificity_9: 0.9391 - val_accuracy: 0.8830
Epoch 4/5
164/164 [==============================] - 109s 666ms/step - loss: 0.7959 - precision_22: 0.9031 - recall_12: 0.9031 - specificity_at_sensitivity_7: 0.9481 - sensitivity_at_specificity_9: 0.9481 - accuracy: 0.9031 - val_loss: 0.6393 - val_precision_22: 0.8926 - val_recall_12: 0.8926 - val_specificity_at_sensitivity_7: 0.9551 - val_sensitivity_at_specificity_9: 0.9551 - val_accuracy: 0.8926
Epoch 5/5
164/164 [==============================] - 109s 666ms/step - loss: 0.7639 - precision_22: 0.9100 - recall_12: 0.9100 - specificity_at_sensitivity_7: 0.9540 - sensitivity_at_specificity_9: 0.9540 - accuracy: 0.9100 - val_loss: 3.9008 - val_precision_22: 0.6843 - val_recall_12: 0.6843 - val_specificity_at_sensitivity_7: 0.7580 - val_sensitivity_at_specificity_9: 0.7580 - val_accuracy: 0.6843

I didn't see any issue in the code above, but to replicate your problem could you share your complete code so that we can debug and help you? Thanks! - TFer2
@TFer2 Thanks. Actually, I also didn't notice any problem or error, but every time I re-run the code the names change and I have to rewrite them for plotting. They change from "recall_1" to "recall_2", and so on. - Gourab Sarker

2 Answers


@TFer2, here is the complete code:

# -*- coding: utf-8 -*-
"""Vgg19.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1C71ob5s4BWiK0GF2eVOf00bpNhB1Nu_f
"""

# import the libraries as shown below

from keras.layers import Input, Lambda, Dense, Flatten
from keras.models import Model
#from keras.applications.resnet50 import ResNet50
#from keras.applications.vgg16 import VGG16
#from keras.applications.vgg16 import preprocess_input
from keras.preprocessing import image
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
import numpy as np
from glob import glob
import matplotlib.pyplot as plt
from keras.applications.vgg19 import VGG19
from keras.applications.vgg19 import preprocess_input

import keras

# re-size all the images to this
IMAGE_SIZE = [224, 224]

train_path = 'drive/My Drive/chest_xray/train'
valid_path = 'drive/My Drive/chest_xray/test'

# Import the VGG19 model as shown below and add a preprocessing layer to the front of it
# Here we will be using ImageNet weights

vgg = VGG19(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)

# don't train existing weights
for layer in vgg.layers:
    layer.trainable = False

# useful for getting number of output classes
folders = glob('drive/My Drive/chest_xray/train/*')

# our layers - you can add more if you want
x = Flatten()(vgg.output)

prediction = Dense(len(folders), activation='softmax')(x)

# create a model object
model = Model(inputs=vgg.input, outputs=prediction)

# view the structure of the model
model.summary()

# tell the model what cost and optimization method to use
# model.compile(
  #loss='categorical_crossentropy',
  #optimizer='adam',
  #metrics=['accuracy']
#)

model.compile(optimizer="adam",
              loss="categorical_crossentropy",
              metrics=[keras.metrics.Precision(), keras.metrics.Recall(), keras.metrics.SpecificityAtSensitivity(0.5), keras.metrics.SensitivityAtSpecificity(0.5), keras.metrics.AUC(), 'accuracy'])

# Use the Image Data Generator to import the images from the dataset
from keras.preprocessing.image import ImageDataGenerator

train_datagen = ImageDataGenerator(rescale = 1./255,
                                   shear_range = 0.2,
                                   zoom_range = 0.2,
                                   horizontal_flip = True)

test_datagen = ImageDataGenerator(rescale = 1./255)

# Make sure you provide the same target size as initialized for the image size
training_set = train_datagen.flow_from_directory('drive/My Drive/chest_xray/train',
                                                 target_size = (224, 224),
                                                 batch_size = 32,
                                                 class_mode = 'categorical')

test_set = test_datagen.flow_from_directory('drive/My Drive/chest_xray/test',
                                            target_size = (224, 224),
                                            batch_size = 32,
                                            class_mode = 'categorical')

# fit the model
# Run the cell. It will take some time to execute
r = model.fit_generator(
  training_set,
  validation_data=test_set,
  epochs=5,
  steps_per_epoch=len(training_set),
  validation_steps=len(test_set)
)

# plot the loss
plt.plot(r.history['loss'], label='train loss')
plt.plot(r.history['val_loss'], label='val loss')
plt.legend()
# save before plt.show(), otherwise the saved figure is blank
plt.savefig('LossVal_loss(Vgg19)')
plt.show()

# plot the accuracy
plt.plot(r.history['accuracy'], label='train acc')
plt.plot(r.history['val_accuracy'], label='val acc')
plt.legend()
plt.savefig('AccVal_acc(Vgg19)')
plt.show()

# plot the recall
plt.plot(r.history['recall_1'], label='train recall')
plt.plot(r.history['val_recall_1'], label='val recall')
plt.legend()
plt.savefig('RecallVal_recall(Vgg19)')
plt.show()

# plot the precision
plt.plot(r.history['precision_1'], label='train precision')
plt.plot(r.history['val_precision_1'], label='val precision')
plt.legend()
plt.savefig('PrecisionVal_precision(Vgg19)')
plt.show()

# plot the specificity_at_sensitivity
plt.plot(r.history['specificity_at_sensitivity_1'], label='train specificity_at_sensitivity')
plt.plot(r.history['val_specificity_at_sensitivity_1'], label='val specificity_at_sensitivity')
plt.legend()
plt.savefig('specificity_at_sensitivityVal_specificity_at_sensitivity(Vgg19)')
plt.show()

# plot the sensitivity_at_specificity
plt.plot(r.history['sensitivity_at_specificity_1'], label='train sensitivity_at_specificity')
plt.plot(r.history['val_sensitivity_at_specificity_1'], label='val sensitivity_at_specificity')
plt.legend()
plt.savefig('sensitivity_at_specificityVal_sensitivity_at_specificity(Vgg19)')
plt.show()

# plot the AUC/ROC
plt.plot(r.history['auc_1'], label='train AUC')
plt.plot(r.history['val_auc_1'], label='val AUC')
plt.legend()
plt.savefig('AUCVal_AUC(Vgg19)')
plt.show()
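
Until the metric names are fixed (see the other answer), one way to stop hard-coding the suffixed keys in the plotting cells above is to look each key up by its prefix. A minimal sketch; the metric_key helper is my own addition, not part of the original notebook:

def metric_key(history, prefix):
    # Hypothetical helper: return the first training-history key that starts with
    # the given prefix, so the plots no longer break when the auto-generated
    # suffix changes (e.g. 'recall_1' vs 'recall_2').
    return next(k for k in history if k.startswith(prefix) and not k.startswith('val_'))

recall_key = metric_key(r.history, 'recall')
plt.plot(r.history[recall_key], label='train recall')
plt.plot(r.history['val_' + recall_key], label='val recall')
plt.legend()
plt.savefig('RecallVal_recall(Vgg19)')
plt.show()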


I recently encountered this same odd behaviour. I did not find the root cause, but I managed to work around it by explicitly passing the name argument to the constructor of each metric instantiated in the metrics list passed to compile (e.g. Recall). See the docs, e.g. for the Recall metric, for this optional name argument.

Therefore, I would suggest compiling your model as follows:

model.compile(optimizer="sgd",
              loss="categorical_crossentropy",
              metrics=[keras.metrics.Precision(name='precision'),
                       keras.metrics.Recall(name='recall'), 
                       keras.metrics.SpecificityAtSensitivity(0.5, name='specificity_at_sensitivity'),
                       keras.metrics.SensitivityAtSpecificity(0.5, name='sensitivity_at_specificity'),
                       'accuracy'])
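
With explicit names, the keys in r.history stay the same no matter how often the compile cell is re-run, so the plotting code can use fixed keys. A small sketch based on the compile call above, using the question's r = model.fit_generator(...) result and matplotlib:

# The history keys now match the names given above ('recall', 'val_recall', ...)
# and no longer need to be updated after every re-run.
plt.plot(r.history['recall'], label='train recall')
plt.plot(r.history['val_recall'], label='val recall')
plt.legend()
plt.savefig('RecallVal_recall(Vgg19)')
plt.show()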