Author: Ruchit Agrawal
CheXNet
Grad-CAM visualization
# Colab-only: mount Google Drive so the data, labels and weights below are reachable.
from google.colab import drive
drive.mount('/content/drive')
# Dataset and pre-trained weight locations on the mounted Drive.
image_folder = '/content/drive/My Drive/Medical image Reporting/data/images/'
labels_file = '/content/drive/My Drive/Medical image Reporting/data/labels.csv'
chexnet_weights = '/content/drive/My Drive/Medical image Reporting/ChexNet weights/brucechou1983_CheXNet_Keras_0.3.0_weights.h5'
# The 14 ChestX-ray14 pathology labels, in the order the CheXNet head was trained.
class_name = "Atelectasis,Cardiomegaly,Effusion,Infiltration,Mass,Nodule,Pneumonia,Pneumothorax,Consolidation,Edema,Emphysema,Fibrosis,Pleural_Thickening,Hernia".split(',')
import warnings
warnings.filterwarnings('ignore')
import joblib
import os
import tensorflow as tf
from tensorflow.keras.layers import Dense,GlobalAveragePooling2D, Input, Embedding, LSTM,Dot,Reshape,Concatenate,BatchNormalization, GlobalMaxPooling2D, Dropout, Add, MaxPooling2D, GRU, AveragePooling2D, Activation
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
import pandas as pd
import numpy as np
import cv2
import random
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import seaborn as sns
sns.set_context('notebook')
from nltk.translate.bleu_score import sentence_bleu
def create_chexnet(chexnet_weights=chexnet_weights, input_size=(224, 224)):
    """
    Build the CheXNet classifier and load the pre-trained weights.

    Args:
        chexnet_weights: path to the brucechou1983 CheXNet Keras .h5 weight file.
        input_size: (height, width) of the input images; channels are fixed at 3.

    Returns:
        chexnet:  full model, input image -> 14 per-class probabilities.
        _chexnet: truncated model that stops at the last DenseNet feature map
                  (the layer two steps before the classifier head), for use as
                  a visual feature extractor / Grad-CAM target.
    """
    # DenseNet121 backbone without its ImageNet top; its last layer is a ReLU
    # activation over the final 7x7 feature maps.
    backbone = tf.keras.applications.DenseNet121(include_top=False,
                                                 input_shape=input_size + (3,))
    x = backbone.output
    x = GlobalAveragePooling2D()(x)
    # ChestX-ray14 is a multi-label task: the original CheXNet-Keras head is a
    # 14-unit *sigmoid* layer (independent per-pathology probabilities), not
    # softmax. The weight file loads either way (same shape), but softmax would
    # wrongly force the probabilities to sum to 1. Argmax is unaffected.
    x = Dense(14, activation="sigmoid", name="chexnet_output")(x)
    chexnet = tf.keras.Model(inputs=backbone.input, outputs=x)
    chexnet.load_weights(chexnet_weights)
    # layers[-3] is the backbone's final ReLU feature map (before GAP + Dense).
    _chexnet = tf.keras.Model(inputs=backbone.input,
                              outputs=chexnet.layers[-3].output)
    return chexnet, _chexnet
def predict_chexnet(img, model, plot=False):
    """
    Predict the most likely pathology for one image.

    Args:
        img: batched image tensor/array the model accepts (batch size 1).
        model: the full CheXNet model returned by create_chexnet.
        plot: if True, draw a bar chart of all 14 class probabilities.

    Returns:
        "<class name> <probability rounded to 2 dp>" for the top class.
    """
    # Uses the module-level `class_name` label list (same order as the model head).
    pred = model(img)
    if plot:
        plt.figure(figsize=(30, 4))
        plt.title('Prediction probabilities')
        # seaborn >= 0.12 removed positional x/y — keyword arguments required.
        sns.barplot(x=np.array(class_name),
                    y=np.array(pred.numpy().reshape(-1)),
                    palette="Blues_d")
        plt.tight_layout()
    index = tf.argmax(pred, axis=1).numpy()[0]
    percentage = round(float(pred[0, index]), 2)
    return class_name[index] + f' {percentage}'
# Instantiate the full CheXNet classifier; the feature-extractor variant is discarded here.
model, _ = create_chexnet(chexnet_weights = chexnet_weights,input_size=(224,224))
Model Visualization
# Print the layer-by-layer architecture (the captured output follows below).
model.summary()
Model: "model" __________________________________________________________________________________________________ Layer (type) Output Shape Param # Connected to ================================================================================================== input_1 (InputLayer) [(None, 224, 224, 3) 0 __________________________________________________________________________________________________ zero_padding2d (ZeroPadding2D) (None, 230, 230, 3) 0 input_1[0][0] __________________________________________________________________________________________________ conv1/conv (Conv2D) (None, 112, 112, 64) 9408 zero_padding2d[0][0] __________________________________________________________________________________________________ conv1/bn (BatchNormalization) (None, 112, 112, 64) 256 conv1/conv[0][0] __________________________________________________________________________________________________ conv1/relu (Activation) (None, 112, 112, 64) 0 conv1/bn[0][0] __________________________________________________________________________________________________ zero_padding2d_1 (ZeroPadding2D (None, 114, 114, 64) 0 conv1/relu[0][0] __________________________________________________________________________________________________ pool1 (MaxPooling2D) (None, 56, 56, 64) 0 zero_padding2d_1[0][0] __________________________________________________________________________________________________ conv2_block1_0_bn (BatchNormali (None, 56, 56, 64) 256 pool1[0][0] __________________________________________________________________________________________________ conv2_block1_0_relu (Activation (None, 56, 56, 64) 0 conv2_block1_0_bn[0][0] __________________________________________________________________________________________________ conv2_block1_1_conv (Conv2D) (None, 56, 56, 128) 8192 conv2_block1_0_relu[0][0] __________________________________________________________________________________________________ conv2_block1_1_bn (BatchNormali (None, 56, 56, 128) 512 
conv2_block1_1_conv[0][0] __________________________________________________________________________________________________ conv2_block1_1_relu (Activation (None, 56, 56, 128) 0 conv2_block1_1_bn[0][0] __________________________________________________________________________________________________ conv2_block1_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block1_1_relu[0][0] __________________________________________________________________________________________________ conv2_block1_concat (Concatenat (None, 56, 56, 96) 0 pool1[0][0] conv2_block1_2_conv[0][0] __________________________________________________________________________________________________ conv2_block2_0_bn (BatchNormali (None, 56, 56, 96) 384 conv2_block1_concat[0][0] __________________________________________________________________________________________________ conv2_block2_0_relu (Activation (None, 56, 56, 96) 0 conv2_block2_0_bn[0][0] __________________________________________________________________________________________________ conv2_block2_1_conv (Conv2D) (None, 56, 56, 128) 12288 conv2_block2_0_relu[0][0] __________________________________________________________________________________________________ conv2_block2_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block2_1_conv[0][0] __________________________________________________________________________________________________ conv2_block2_1_relu (Activation (None, 56, 56, 128) 0 conv2_block2_1_bn[0][0] __________________________________________________________________________________________________ conv2_block2_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block2_1_relu[0][0] __________________________________________________________________________________________________ conv2_block2_concat (Concatenat (None, 56, 56, 128) 0 conv2_block1_concat[0][0] conv2_block2_2_conv[0][0] __________________________________________________________________________________________________ conv2_block3_0_bn (BatchNormali (None, 56, 56, 
128) 512 conv2_block2_concat[0][0] __________________________________________________________________________________________________ conv2_block3_0_relu (Activation (None, 56, 56, 128) 0 conv2_block3_0_bn[0][0] __________________________________________________________________________________________________ conv2_block3_1_conv (Conv2D) (None, 56, 56, 128) 16384 conv2_block3_0_relu[0][0] __________________________________________________________________________________________________ conv2_block3_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block3_1_conv[0][0] __________________________________________________________________________________________________ conv2_block3_1_relu (Activation (None, 56, 56, 128) 0 conv2_block3_1_bn[0][0] __________________________________________________________________________________________________ conv2_block3_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block3_1_relu[0][0] __________________________________________________________________________________________________ conv2_block3_concat (Concatenat (None, 56, 56, 160) 0 conv2_block2_concat[0][0] conv2_block3_2_conv[0][0] __________________________________________________________________________________________________ conv2_block4_0_bn (BatchNormali (None, 56, 56, 160) 640 conv2_block3_concat[0][0] __________________________________________________________________________________________________ conv2_block4_0_relu (Activation (None, 56, 56, 160) 0 conv2_block4_0_bn[0][0] __________________________________________________________________________________________________ conv2_block4_1_conv (Conv2D) (None, 56, 56, 128) 20480 conv2_block4_0_relu[0][0] __________________________________________________________________________________________________ conv2_block4_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block4_1_conv[0][0] __________________________________________________________________________________________________ conv2_block4_1_relu (Activation (None, 56, 56, 
128) 0 conv2_block4_1_bn[0][0] __________________________________________________________________________________________________ conv2_block4_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block4_1_relu[0][0] __________________________________________________________________________________________________ conv2_block4_concat (Concatenat (None, 56, 56, 192) 0 conv2_block3_concat[0][0] conv2_block4_2_conv[0][0] __________________________________________________________________________________________________ conv2_block5_0_bn (BatchNormali (None, 56, 56, 192) 768 conv2_block4_concat[0][0] __________________________________________________________________________________________________ conv2_block5_0_relu (Activation (None, 56, 56, 192) 0 conv2_block5_0_bn[0][0] __________________________________________________________________________________________________ conv2_block5_1_conv (Conv2D) (None, 56, 56, 128) 24576 conv2_block5_0_relu[0][0] __________________________________________________________________________________________________ conv2_block5_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block5_1_conv[0][0] __________________________________________________________________________________________________ conv2_block5_1_relu (Activation (None, 56, 56, 128) 0 conv2_block5_1_bn[0][0] __________________________________________________________________________________________________ conv2_block5_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block5_1_relu[0][0] __________________________________________________________________________________________________ conv2_block5_concat (Concatenat (None, 56, 56, 224) 0 conv2_block4_concat[0][0] conv2_block5_2_conv[0][0] __________________________________________________________________________________________________ conv2_block6_0_bn (BatchNormali (None, 56, 56, 224) 896 conv2_block5_concat[0][0] __________________________________________________________________________________________________ conv2_block6_0_relu 
(Activation (None, 56, 56, 224) 0 conv2_block6_0_bn[0][0] __________________________________________________________________________________________________ conv2_block6_1_conv (Conv2D) (None, 56, 56, 128) 28672 conv2_block6_0_relu[0][0] __________________________________________________________________________________________________ conv2_block6_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block6_1_conv[0][0] __________________________________________________________________________________________________ conv2_block6_1_relu (Activation (None, 56, 56, 128) 0 conv2_block6_1_bn[0][0] __________________________________________________________________________________________________ conv2_block6_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block6_1_relu[0][0] __________________________________________________________________________________________________ conv2_block6_concat (Concatenat (None, 56, 56, 256) 0 conv2_block5_concat[0][0] conv2_block6_2_conv[0][0] __________________________________________________________________________________________________ pool2_bn (BatchNormalization) (None, 56, 56, 256) 1024 conv2_block6_concat[0][0] __________________________________________________________________________________________________ pool2_relu (Activation) (None, 56, 56, 256) 0 pool2_bn[0][0] __________________________________________________________________________________________________ pool2_conv (Conv2D) (None, 56, 56, 128) 32768 pool2_relu[0][0] __________________________________________________________________________________________________ pool2_pool (AveragePooling2D) (None, 28, 28, 128) 0 pool2_conv[0][0] __________________________________________________________________________________________________ conv3_block1_0_bn (BatchNormali (None, 28, 28, 128) 512 pool2_pool[0][0] __________________________________________________________________________________________________ conv3_block1_0_relu (Activation (None, 28, 28, 128) 0 conv3_block1_0_bn[0][0] 
__________________________________________________________________________________________________ conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 16384 conv3_block1_0_relu[0][0] __________________________________________________________________________________________________ conv3_block1_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block1_1_conv[0][0] __________________________________________________________________________________________________ conv3_block1_1_relu (Activation (None, 28, 28, 128) 0 conv3_block1_1_bn[0][0] __________________________________________________________________________________________________ conv3_block1_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block1_1_relu[0][0] __________________________________________________________________________________________________ conv3_block1_concat (Concatenat (None, 28, 28, 160) 0 pool2_pool[0][0] conv3_block1_2_conv[0][0] __________________________________________________________________________________________________ conv3_block2_0_bn (BatchNormali (None, 28, 28, 160) 640 conv3_block1_concat[0][0] __________________________________________________________________________________________________ conv3_block2_0_relu (Activation (None, 28, 28, 160) 0 conv3_block2_0_bn[0][0] __________________________________________________________________________________________________ conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 20480 conv3_block2_0_relu[0][0] __________________________________________________________________________________________________ conv3_block2_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block2_1_conv[0][0] __________________________________________________________________________________________________ conv3_block2_1_relu (Activation (None, 28, 28, 128) 0 conv3_block2_1_bn[0][0] __________________________________________________________________________________________________ conv3_block2_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block2_1_relu[0][0] 
__________________________________________________________________________________________________ conv3_block2_concat (Concatenat (None, 28, 28, 192) 0 conv3_block1_concat[0][0] conv3_block2_2_conv[0][0] __________________________________________________________________________________________________ conv3_block3_0_bn (BatchNormali (None, 28, 28, 192) 768 conv3_block2_concat[0][0] __________________________________________________________________________________________________ conv3_block3_0_relu (Activation (None, 28, 28, 192) 0 conv3_block3_0_bn[0][0] __________________________________________________________________________________________________ conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 24576 conv3_block3_0_relu[0][0] __________________________________________________________________________________________________ conv3_block3_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block3_1_conv[0][0] __________________________________________________________________________________________________ conv3_block3_1_relu (Activation (None, 28, 28, 128) 0 conv3_block3_1_bn[0][0] __________________________________________________________________________________________________ conv3_block3_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block3_1_relu[0][0] __________________________________________________________________________________________________ conv3_block3_concat (Concatenat (None, 28, 28, 224) 0 conv3_block2_concat[0][0] conv3_block3_2_conv[0][0] __________________________________________________________________________________________________ conv3_block4_0_bn (BatchNormali (None, 28, 28, 224) 896 conv3_block3_concat[0][0] __________________________________________________________________________________________________ conv3_block4_0_relu (Activation (None, 28, 28, 224) 0 conv3_block4_0_bn[0][0] __________________________________________________________________________________________________ conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 28672 
conv3_block4_0_relu[0][0] __________________________________________________________________________________________________ conv3_block4_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block4_1_conv[0][0] __________________________________________________________________________________________________ conv3_block4_1_relu (Activation (None, 28, 28, 128) 0 conv3_block4_1_bn[0][0] __________________________________________________________________________________________________ conv3_block4_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block4_1_relu[0][0] __________________________________________________________________________________________________ conv3_block4_concat (Concatenat (None, 28, 28, 256) 0 conv3_block3_concat[0][0] conv3_block4_2_conv[0][0] __________________________________________________________________________________________________ conv3_block5_0_bn (BatchNormali (None, 28, 28, 256) 1024 conv3_block4_concat[0][0] __________________________________________________________________________________________________ conv3_block5_0_relu (Activation (None, 28, 28, 256) 0 conv3_block5_0_bn[0][0] __________________________________________________________________________________________________ conv3_block5_1_conv (Conv2D) (None, 28, 28, 128) 32768 conv3_block5_0_relu[0][0] __________________________________________________________________________________________________ conv3_block5_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block5_1_conv[0][0] __________________________________________________________________________________________________ conv3_block5_1_relu (Activation (None, 28, 28, 128) 0 conv3_block5_1_bn[0][0] __________________________________________________________________________________________________ conv3_block5_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block5_1_relu[0][0] __________________________________________________________________________________________________ conv3_block5_concat (Concatenat (None, 28, 28, 288) 0 
conv3_block4_concat[0][0] conv3_block5_2_conv[0][0] __________________________________________________________________________________________________ conv3_block6_0_bn (BatchNormali (None, 28, 28, 288) 1152 conv3_block5_concat[0][0] __________________________________________________________________________________________________ conv3_block6_0_relu (Activation (None, 28, 28, 288) 0 conv3_block6_0_bn[0][0] __________________________________________________________________________________________________ conv3_block6_1_conv (Conv2D) (None, 28, 28, 128) 36864 conv3_block6_0_relu[0][0] __________________________________________________________________________________________________ conv3_block6_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block6_1_conv[0][0] __________________________________________________________________________________________________ conv3_block6_1_relu (Activation (None, 28, 28, 128) 0 conv3_block6_1_bn[0][0] __________________________________________________________________________________________________ conv3_block6_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block6_1_relu[0][0] __________________________________________________________________________________________________ conv3_block6_concat (Concatenat (None, 28, 28, 320) 0 conv3_block5_concat[0][0] conv3_block6_2_conv[0][0] __________________________________________________________________________________________________ conv3_block7_0_bn (BatchNormali (None, 28, 28, 320) 1280 conv3_block6_concat[0][0] __________________________________________________________________________________________________ conv3_block7_0_relu (Activation (None, 28, 28, 320) 0 conv3_block7_0_bn[0][0] __________________________________________________________________________________________________ conv3_block7_1_conv (Conv2D) (None, 28, 28, 128) 40960 conv3_block7_0_relu[0][0] __________________________________________________________________________________________________ conv3_block7_1_bn 
(BatchNormali (None, 28, 28, 128) 512 conv3_block7_1_conv[0][0] __________________________________________________________________________________________________ conv3_block7_1_relu (Activation (None, 28, 28, 128) 0 conv3_block7_1_bn[0][0] __________________________________________________________________________________________________ conv3_block7_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block7_1_relu[0][0] __________________________________________________________________________________________________ conv3_block7_concat (Concatenat (None, 28, 28, 352) 0 conv3_block6_concat[0][0] conv3_block7_2_conv[0][0] __________________________________________________________________________________________________ conv3_block8_0_bn (BatchNormali (None, 28, 28, 352) 1408 conv3_block7_concat[0][0] __________________________________________________________________________________________________ conv3_block8_0_relu (Activation (None, 28, 28, 352) 0 conv3_block8_0_bn[0][0] __________________________________________________________________________________________________ conv3_block8_1_conv (Conv2D) (None, 28, 28, 128) 45056 conv3_block8_0_relu[0][0] __________________________________________________________________________________________________ conv3_block8_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block8_1_conv[0][0] __________________________________________________________________________________________________ conv3_block8_1_relu (Activation (None, 28, 28, 128) 0 conv3_block8_1_bn[0][0] __________________________________________________________________________________________________ conv3_block8_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block8_1_relu[0][0] __________________________________________________________________________________________________ conv3_block8_concat (Concatenat (None, 28, 28, 384) 0 conv3_block7_concat[0][0] conv3_block8_2_conv[0][0] 
__________________________________________________________________________________________________ conv3_block9_0_bn (BatchNormali (None, 28, 28, 384) 1536 conv3_block8_concat[0][0] __________________________________________________________________________________________________ conv3_block9_0_relu (Activation (None, 28, 28, 384) 0 conv3_block9_0_bn[0][0] __________________________________________________________________________________________________ conv3_block9_1_conv (Conv2D) (None, 28, 28, 128) 49152 conv3_block9_0_relu[0][0] __________________________________________________________________________________________________ conv3_block9_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block9_1_conv[0][0] __________________________________________________________________________________________________ conv3_block9_1_relu (Activation (None, 28, 28, 128) 0 conv3_block9_1_bn[0][0] __________________________________________________________________________________________________ conv3_block9_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block9_1_relu[0][0] __________________________________________________________________________________________________ conv3_block9_concat (Concatenat (None, 28, 28, 416) 0 conv3_block8_concat[0][0] conv3_block9_2_conv[0][0] __________________________________________________________________________________________________ conv3_block10_0_bn (BatchNormal (None, 28, 28, 416) 1664 conv3_block9_concat[0][0] __________________________________________________________________________________________________ conv3_block10_0_relu (Activatio (None, 28, 28, 416) 0 conv3_block10_0_bn[0][0] __________________________________________________________________________________________________ conv3_block10_1_conv (Conv2D) (None, 28, 28, 128) 53248 conv3_block10_0_relu[0][0] __________________________________________________________________________________________________ conv3_block10_1_bn (BatchNormal (None, 28, 28, 128) 512 
conv3_block10_1_conv[0][0] __________________________________________________________________________________________________ conv3_block10_1_relu (Activatio (None, 28, 28, 128) 0 conv3_block10_1_bn[0][0] __________________________________________________________________________________________________ conv3_block10_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block10_1_relu[0][0] __________________________________________________________________________________________________ conv3_block10_concat (Concatena (None, 28, 28, 448) 0 conv3_block9_concat[0][0] conv3_block10_2_conv[0][0] __________________________________________________________________________________________________ conv3_block11_0_bn (BatchNormal (None, 28, 28, 448) 1792 conv3_block10_concat[0][0] __________________________________________________________________________________________________ conv3_block11_0_relu (Activatio (None, 28, 28, 448) 0 conv3_block11_0_bn[0][0] __________________________________________________________________________________________________ conv3_block11_1_conv (Conv2D) (None, 28, 28, 128) 57344 conv3_block11_0_relu[0][0] __________________________________________________________________________________________________ conv3_block11_1_bn (BatchNormal (None, 28, 28, 128) 512 conv3_block11_1_conv[0][0] __________________________________________________________________________________________________ conv3_block11_1_relu (Activatio (None, 28, 28, 128) 0 conv3_block11_1_bn[0][0] __________________________________________________________________________________________________ conv3_block11_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block11_1_relu[0][0] __________________________________________________________________________________________________ conv3_block11_concat (Concatena (None, 28, 28, 480) 0 conv3_block10_concat[0][0] conv3_block11_2_conv[0][0] __________________________________________________________________________________________________ 
conv3_block12_0_bn (BatchNormal (None, 28, 28, 480) 1920 conv3_block11_concat[0][0] __________________________________________________________________________________________________ conv3_block12_0_relu (Activatio (None, 28, 28, 480) 0 conv3_block12_0_bn[0][0] __________________________________________________________________________________________________ conv3_block12_1_conv (Conv2D) (None, 28, 28, 128) 61440 conv3_block12_0_relu[0][0] __________________________________________________________________________________________________ conv3_block12_1_bn (BatchNormal (None, 28, 28, 128) 512 conv3_block12_1_conv[0][0] __________________________________________________________________________________________________ conv3_block12_1_relu (Activatio (None, 28, 28, 128) 0 conv3_block12_1_bn[0][0] __________________________________________________________________________________________________ conv3_block12_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block12_1_relu[0][0] __________________________________________________________________________________________________ conv3_block12_concat (Concatena (None, 28, 28, 512) 0 conv3_block11_concat[0][0] conv3_block12_2_conv[0][0] __________________________________________________________________________________________________ pool3_bn (BatchNormalization) (None, 28, 28, 512) 2048 conv3_block12_concat[0][0] __________________________________________________________________________________________________ pool3_relu (Activation) (None, 28, 28, 512) 0 pool3_bn[0][0] __________________________________________________________________________________________________ pool3_conv (Conv2D) (None, 28, 28, 256) 131072 pool3_relu[0][0] __________________________________________________________________________________________________ pool3_pool (AveragePooling2D) (None, 14, 14, 256) 0 pool3_conv[0][0] __________________________________________________________________________________________________ conv4_block1_0_bn (BatchNormali 
(None, 14, 14, 256) 1024 pool3_pool[0][0] __________________________________________________________________________________________________ conv4_block1_0_relu (Activation (None, 14, 14, 256) 0 conv4_block1_0_bn[0][0] __________________________________________________________________________________________________ conv4_block1_1_conv (Conv2D) (None, 14, 14, 128) 32768 conv4_block1_0_relu[0][0] __________________________________________________________________________________________________ conv4_block1_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block1_1_conv[0][0] __________________________________________________________________________________________________ conv4_block1_1_relu (Activation (None, 14, 14, 128) 0 conv4_block1_1_bn[0][0] __________________________________________________________________________________________________ conv4_block1_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block1_1_relu[0][0] __________________________________________________________________________________________________ conv4_block1_concat (Concatenat (None, 14, 14, 288) 0 pool3_pool[0][0] conv4_block1_2_conv[0][0] __________________________________________________________________________________________________ conv4_block2_0_bn (BatchNormali (None, 14, 14, 288) 1152 conv4_block1_concat[0][0] __________________________________________________________________________________________________ conv4_block2_0_relu (Activation (None, 14, 14, 288) 0 conv4_block2_0_bn[0][0] __________________________________________________________________________________________________ conv4_block2_1_conv (Conv2D) (None, 14, 14, 128) 36864 conv4_block2_0_relu[0][0] __________________________________________________________________________________________________ conv4_block2_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block2_1_conv[0][0] __________________________________________________________________________________________________ conv4_block2_1_relu (Activation (None, 14, 14, 
128) 0 conv4_block2_1_bn[0][0] __________________________________________________________________________________________________ conv4_block2_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block2_1_relu[0][0] __________________________________________________________________________________________________ conv4_block2_concat (Concatenat (None, 14, 14, 320) 0 conv4_block1_concat[0][0] conv4_block2_2_conv[0][0] __________________________________________________________________________________________________ conv4_block3_0_bn (BatchNormali (None, 14, 14, 320) 1280 conv4_block2_concat[0][0] __________________________________________________________________________________________________ conv4_block3_0_relu (Activation (None, 14, 14, 320) 0 conv4_block3_0_bn[0][0] __________________________________________________________________________________________________ conv4_block3_1_conv (Conv2D) (None, 14, 14, 128) 40960 conv4_block3_0_relu[0][0] __________________________________________________________________________________________________ conv4_block3_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block3_1_conv[0][0] __________________________________________________________________________________________________ conv4_block3_1_relu (Activation (None, 14, 14, 128) 0 conv4_block3_1_bn[0][0] __________________________________________________________________________________________________ conv4_block3_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block3_1_relu[0][0] __________________________________________________________________________________________________ conv4_block3_concat (Concatenat (None, 14, 14, 352) 0 conv4_block2_concat[0][0] conv4_block3_2_conv[0][0] __________________________________________________________________________________________________ conv4_block4_0_bn (BatchNormali (None, 14, 14, 352) 1408 conv4_block3_concat[0][0] __________________________________________________________________________________________________ conv4_block4_0_relu 
(Activation (None, 14, 14, 352) 0 conv4_block4_0_bn[0][0] __________________________________________________________________________________________________ conv4_block4_1_conv (Conv2D) (None, 14, 14, 128) 45056 conv4_block4_0_relu[0][0] __________________________________________________________________________________________________ conv4_block4_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block4_1_conv[0][0] __________________________________________________________________________________________________ conv4_block4_1_relu (Activation (None, 14, 14, 128) 0 conv4_block4_1_bn[0][0] __________________________________________________________________________________________________ conv4_block4_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block4_1_relu[0][0] __________________________________________________________________________________________________ conv4_block4_concat (Concatenat (None, 14, 14, 384) 0 conv4_block3_concat[0][0] conv4_block4_2_conv[0][0] __________________________________________________________________________________________________ conv4_block5_0_bn (BatchNormali (None, 14, 14, 384) 1536 conv4_block4_concat[0][0] __________________________________________________________________________________________________ conv4_block5_0_relu (Activation (None, 14, 14, 384) 0 conv4_block5_0_bn[0][0] __________________________________________________________________________________________________ conv4_block5_1_conv (Conv2D) (None, 14, 14, 128) 49152 conv4_block5_0_relu[0][0] __________________________________________________________________________________________________ conv4_block5_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block5_1_conv[0][0] __________________________________________________________________________________________________ conv4_block5_1_relu (Activation (None, 14, 14, 128) 0 conv4_block5_1_bn[0][0] __________________________________________________________________________________________________ conv4_block5_2_conv 
(Conv2D) (None, 14, 14, 32) 36864 conv4_block5_1_relu[0][0] __________________________________________________________________________________________________ conv4_block5_concat (Concatenat (None, 14, 14, 416) 0 conv4_block4_concat[0][0] conv4_block5_2_conv[0][0] __________________________________________________________________________________________________ conv4_block6_0_bn (BatchNormali (None, 14, 14, 416) 1664 conv4_block5_concat[0][0] __________________________________________________________________________________________________ conv4_block6_0_relu (Activation (None, 14, 14, 416) 0 conv4_block6_0_bn[0][0] __________________________________________________________________________________________________ conv4_block6_1_conv (Conv2D) (None, 14, 14, 128) 53248 conv4_block6_0_relu[0][0] __________________________________________________________________________________________________ conv4_block6_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block6_1_conv[0][0] __________________________________________________________________________________________________ conv4_block6_1_relu (Activation (None, 14, 14, 128) 0 conv4_block6_1_bn[0][0] __________________________________________________________________________________________________ conv4_block6_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block6_1_relu[0][0] __________________________________________________________________________________________________ conv4_block6_concat (Concatenat (None, 14, 14, 448) 0 conv4_block5_concat[0][0] conv4_block6_2_conv[0][0] __________________________________________________________________________________________________ conv4_block7_0_bn (BatchNormali (None, 14, 14, 448) 1792 conv4_block6_concat[0][0] __________________________________________________________________________________________________ conv4_block7_0_relu (Activation (None, 14, 14, 448) 0 conv4_block7_0_bn[0][0] 
__________________________________________________________________________________________________ conv4_block7_1_conv (Conv2D) (None, 14, 14, 128) 57344 conv4_block7_0_relu[0][0] __________________________________________________________________________________________________ conv4_block7_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block7_1_conv[0][0] __________________________________________________________________________________________________ conv4_block7_1_relu (Activation (None, 14, 14, 128) 0 conv4_block7_1_bn[0][0] __________________________________________________________________________________________________ conv4_block7_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block7_1_relu[0][0] __________________________________________________________________________________________________ conv4_block7_concat (Concatenat (None, 14, 14, 480) 0 conv4_block6_concat[0][0] conv4_block7_2_conv[0][0] __________________________________________________________________________________________________ conv4_block8_0_bn (BatchNormali (None, 14, 14, 480) 1920 conv4_block7_concat[0][0] __________________________________________________________________________________________________ conv4_block8_0_relu (Activation (None, 14, 14, 480) 0 conv4_block8_0_bn[0][0] __________________________________________________________________________________________________ conv4_block8_1_conv (Conv2D) (None, 14, 14, 128) 61440 conv4_block8_0_relu[0][0] __________________________________________________________________________________________________ conv4_block8_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block8_1_conv[0][0] __________________________________________________________________________________________________ conv4_block8_1_relu (Activation (None, 14, 14, 128) 0 conv4_block8_1_bn[0][0] __________________________________________________________________________________________________ conv4_block8_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block8_1_relu[0][0] 
__________________________________________________________________________________________________ conv4_block8_concat (Concatenat (None, 14, 14, 512) 0 conv4_block7_concat[0][0] conv4_block8_2_conv[0][0] __________________________________________________________________________________________________ conv4_block9_0_bn (BatchNormali (None, 14, 14, 512) 2048 conv4_block8_concat[0][0] __________________________________________________________________________________________________ conv4_block9_0_relu (Activation (None, 14, 14, 512) 0 conv4_block9_0_bn[0][0] __________________________________________________________________________________________________ conv4_block9_1_conv (Conv2D) (None, 14, 14, 128) 65536 conv4_block9_0_relu[0][0] __________________________________________________________________________________________________ conv4_block9_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block9_1_conv[0][0] __________________________________________________________________________________________________ conv4_block9_1_relu (Activation (None, 14, 14, 128) 0 conv4_block9_1_bn[0][0] __________________________________________________________________________________________________ conv4_block9_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block9_1_relu[0][0] __________________________________________________________________________________________________ conv4_block9_concat (Concatenat (None, 14, 14, 544) 0 conv4_block8_concat[0][0] conv4_block9_2_conv[0][0] __________________________________________________________________________________________________ conv4_block10_0_bn (BatchNormal (None, 14, 14, 544) 2176 conv4_block9_concat[0][0] __________________________________________________________________________________________________ conv4_block10_0_relu (Activatio (None, 14, 14, 544) 0 conv4_block10_0_bn[0][0] __________________________________________________________________________________________________ conv4_block10_1_conv (Conv2D) (None, 14, 14, 128) 
69632 conv4_block10_0_relu[0][0] __________________________________________________________________________________________________ conv4_block10_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block10_1_conv[0][0] __________________________________________________________________________________________________ conv4_block10_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block10_1_bn[0][0] __________________________________________________________________________________________________ conv4_block10_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block10_1_relu[0][0] __________________________________________________________________________________________________ conv4_block10_concat (Concatena (None, 14, 14, 576) 0 conv4_block9_concat[0][0] conv4_block10_2_conv[0][0] __________________________________________________________________________________________________ conv4_block11_0_bn (BatchNormal (None, 14, 14, 576) 2304 conv4_block10_concat[0][0] __________________________________________________________________________________________________ conv4_block11_0_relu (Activatio (None, 14, 14, 576) 0 conv4_block11_0_bn[0][0] __________________________________________________________________________________________________ conv4_block11_1_conv (Conv2D) (None, 14, 14, 128) 73728 conv4_block11_0_relu[0][0] __________________________________________________________________________________________________ conv4_block11_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block11_1_conv[0][0] __________________________________________________________________________________________________ conv4_block11_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block11_1_bn[0][0] __________________________________________________________________________________________________ conv4_block11_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block11_1_relu[0][0] __________________________________________________________________________________________________ conv4_block11_concat (Concatena 
(None, 14, 14, 608) 0 conv4_block10_concat[0][0] conv4_block11_2_conv[0][0] __________________________________________________________________________________________________ conv4_block12_0_bn (BatchNormal (None, 14, 14, 608) 2432 conv4_block11_concat[0][0] __________________________________________________________________________________________________ conv4_block12_0_relu (Activatio (None, 14, 14, 608) 0 conv4_block12_0_bn[0][0] __________________________________________________________________________________________________ conv4_block12_1_conv (Conv2D) (None, 14, 14, 128) 77824 conv4_block12_0_relu[0][0] __________________________________________________________________________________________________ conv4_block12_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block12_1_conv[0][0] __________________________________________________________________________________________________ conv4_block12_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block12_1_bn[0][0] __________________________________________________________________________________________________ conv4_block12_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block12_1_relu[0][0] __________________________________________________________________________________________________ conv4_block12_concat (Concatena (None, 14, 14, 640) 0 conv4_block11_concat[0][0] conv4_block12_2_conv[0][0] __________________________________________________________________________________________________ conv4_block13_0_bn (BatchNormal (None, 14, 14, 640) 2560 conv4_block12_concat[0][0] __________________________________________________________________________________________________ conv4_block13_0_relu (Activatio (None, 14, 14, 640) 0 conv4_block13_0_bn[0][0] __________________________________________________________________________________________________ conv4_block13_1_conv (Conv2D) (None, 14, 14, 128) 81920 conv4_block13_0_relu[0][0] 
__________________________________________________________________________________________________ conv4_block13_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block13_1_conv[0][0] __________________________________________________________________________________________________ conv4_block13_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block13_1_bn[0][0] __________________________________________________________________________________________________ conv4_block13_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block13_1_relu[0][0] __________________________________________________________________________________________________ conv4_block13_concat (Concatena (None, 14, 14, 672) 0 conv4_block12_concat[0][0] conv4_block13_2_conv[0][0] __________________________________________________________________________________________________ conv4_block14_0_bn (BatchNormal (None, 14, 14, 672) 2688 conv4_block13_concat[0][0] __________________________________________________________________________________________________ conv4_block14_0_relu (Activatio (None, 14, 14, 672) 0 conv4_block14_0_bn[0][0] __________________________________________________________________________________________________ conv4_block14_1_conv (Conv2D) (None, 14, 14, 128) 86016 conv4_block14_0_relu[0][0] __________________________________________________________________________________________________ conv4_block14_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block14_1_conv[0][0] __________________________________________________________________________________________________ conv4_block14_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block14_1_bn[0][0] __________________________________________________________________________________________________ conv4_block14_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block14_1_relu[0][0] __________________________________________________________________________________________________ conv4_block14_concat (Concatena (None, 14, 14, 704) 0 
conv4_block13_concat[0][0] conv4_block14_2_conv[0][0] __________________________________________________________________________________________________ conv4_block15_0_bn (BatchNormal (None, 14, 14, 704) 2816 conv4_block14_concat[0][0] __________________________________________________________________________________________________ conv4_block15_0_relu (Activatio (None, 14, 14, 704) 0 conv4_block15_0_bn[0][0] __________________________________________________________________________________________________ conv4_block15_1_conv (Conv2D) (None, 14, 14, 128) 90112 conv4_block15_0_relu[0][0] __________________________________________________________________________________________________ conv4_block15_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block15_1_conv[0][0] __________________________________________________________________________________________________ conv4_block15_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block15_1_bn[0][0] __________________________________________________________________________________________________ conv4_block15_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block15_1_relu[0][0] __________________________________________________________________________________________________ conv4_block15_concat (Concatena (None, 14, 14, 736) 0 conv4_block14_concat[0][0] conv4_block15_2_conv[0][0] __________________________________________________________________________________________________ conv4_block16_0_bn (BatchNormal (None, 14, 14, 736) 2944 conv4_block15_concat[0][0] __________________________________________________________________________________________________ conv4_block16_0_relu (Activatio (None, 14, 14, 736) 0 conv4_block16_0_bn[0][0] __________________________________________________________________________________________________ conv4_block16_1_conv (Conv2D) (None, 14, 14, 128) 94208 conv4_block16_0_relu[0][0] __________________________________________________________________________________________________ 
conv4_block16_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block16_1_conv[0][0] __________________________________________________________________________________________________ conv4_block16_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block16_1_bn[0][0] __________________________________________________________________________________________________ conv4_block16_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block16_1_relu[0][0] __________________________________________________________________________________________________ conv4_block16_concat (Concatena (None, 14, 14, 768) 0 conv4_block15_concat[0][0] conv4_block16_2_conv[0][0] __________________________________________________________________________________________________ conv4_block17_0_bn (BatchNormal (None, 14, 14, 768) 3072 conv4_block16_concat[0][0] __________________________________________________________________________________________________ conv4_block17_0_relu (Activatio (None, 14, 14, 768) 0 conv4_block17_0_bn[0][0] __________________________________________________________________________________________________ conv4_block17_1_conv (Conv2D) (None, 14, 14, 128) 98304 conv4_block17_0_relu[0][0] __________________________________________________________________________________________________ conv4_block17_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block17_1_conv[0][0] __________________________________________________________________________________________________ conv4_block17_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block17_1_bn[0][0] __________________________________________________________________________________________________ conv4_block17_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block17_1_relu[0][0] __________________________________________________________________________________________________ conv4_block17_concat (Concatena (None, 14, 14, 800) 0 conv4_block16_concat[0][0] conv4_block17_2_conv[0][0] 
__________________________________________________________________________________________________ conv4_block18_0_bn (BatchNormal (None, 14, 14, 800) 3200 conv4_block17_concat[0][0] __________________________________________________________________________________________________ conv4_block18_0_relu (Activatio (None, 14, 14, 800) 0 conv4_block18_0_bn[0][0] __________________________________________________________________________________________________ conv4_block18_1_conv (Conv2D) (None, 14, 14, 128) 102400 conv4_block18_0_relu[0][0] __________________________________________________________________________________________________ conv4_block18_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block18_1_conv[0][0] __________________________________________________________________________________________________ conv4_block18_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block18_1_bn[0][0] __________________________________________________________________________________________________ conv4_block18_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block18_1_relu[0][0] __________________________________________________________________________________________________ conv4_block18_concat (Concatena (None, 14, 14, 832) 0 conv4_block17_concat[0][0] conv4_block18_2_conv[0][0] __________________________________________________________________________________________________ conv4_block19_0_bn (BatchNormal (None, 14, 14, 832) 3328 conv4_block18_concat[0][0] __________________________________________________________________________________________________ conv4_block19_0_relu (Activatio (None, 14, 14, 832) 0 conv4_block19_0_bn[0][0] __________________________________________________________________________________________________ conv4_block19_1_conv (Conv2D) (None, 14, 14, 128) 106496 conv4_block19_0_relu[0][0] __________________________________________________________________________________________________ conv4_block19_1_bn (BatchNormal (None, 14, 14, 128) 512 
conv4_block19_1_conv[0][0] __________________________________________________________________________________________________ conv4_block19_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block19_1_bn[0][0] __________________________________________________________________________________________________ conv4_block19_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block19_1_relu[0][0] __________________________________________________________________________________________________ conv4_block19_concat (Concatena (None, 14, 14, 864) 0 conv4_block18_concat[0][0] conv4_block19_2_conv[0][0] __________________________________________________________________________________________________ conv4_block20_0_bn (BatchNormal (None, 14, 14, 864) 3456 conv4_block19_concat[0][0] __________________________________________________________________________________________________ conv4_block20_0_relu (Activatio (None, 14, 14, 864) 0 conv4_block20_0_bn[0][0] __________________________________________________________________________________________________ conv4_block20_1_conv (Conv2D) (None, 14, 14, 128) 110592 conv4_block20_0_relu[0][0] __________________________________________________________________________________________________ conv4_block20_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block20_1_conv[0][0] __________________________________________________________________________________________________ conv4_block20_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block20_1_bn[0][0] __________________________________________________________________________________________________ conv4_block20_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block20_1_relu[0][0] __________________________________________________________________________________________________ conv4_block20_concat (Concatena (None, 14, 14, 896) 0 conv4_block19_concat[0][0] conv4_block20_2_conv[0][0] __________________________________________________________________________________________________ 
conv4_block21_0_bn (BatchNormal (None, 14, 14, 896) 3584 conv4_block20_concat[0][0] __________________________________________________________________________________________________ conv4_block21_0_relu (Activatio (None, 14, 14, 896) 0 conv4_block21_0_bn[0][0] __________________________________________________________________________________________________ conv4_block21_1_conv (Conv2D) (None, 14, 14, 128) 114688 conv4_block21_0_relu[0][0] __________________________________________________________________________________________________ conv4_block21_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block21_1_conv[0][0] __________________________________________________________________________________________________ conv4_block21_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block21_1_bn[0][0] __________________________________________________________________________________________________ conv4_block21_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block21_1_relu[0][0] __________________________________________________________________________________________________ conv4_block21_concat (Concatena (None, 14, 14, 928) 0 conv4_block20_concat[0][0] conv4_block21_2_conv[0][0] __________________________________________________________________________________________________ conv4_block22_0_bn (BatchNormal (None, 14, 14, 928) 3712 conv4_block21_concat[0][0] __________________________________________________________________________________________________ conv4_block22_0_relu (Activatio (None, 14, 14, 928) 0 conv4_block22_0_bn[0][0] __________________________________________________________________________________________________ conv4_block22_1_conv (Conv2D) (None, 14, 14, 128) 118784 conv4_block22_0_relu[0][0] __________________________________________________________________________________________________ conv4_block22_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block22_1_conv[0][0] 
__________________________________________________________________________________________________ conv4_block22_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block22_1_bn[0][0] __________________________________________________________________________________________________ conv4_block22_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block22_1_relu[0][0] __________________________________________________________________________________________________ conv4_block22_concat (Concatena (None, 14, 14, 960) 0 conv4_block21_concat[0][0] conv4_block22_2_conv[0][0] __________________________________________________________________________________________________ conv4_block23_0_bn (BatchNormal (None, 14, 14, 960) 3840 conv4_block22_concat[0][0] __________________________________________________________________________________________________ conv4_block23_0_relu (Activatio (None, 14, 14, 960) 0 conv4_block23_0_bn[0][0] __________________________________________________________________________________________________ conv4_block23_1_conv (Conv2D) (None, 14, 14, 128) 122880 conv4_block23_0_relu[0][0] __________________________________________________________________________________________________ conv4_block23_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block23_1_conv[0][0] __________________________________________________________________________________________________ conv4_block23_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block23_1_bn[0][0] __________________________________________________________________________________________________ conv4_block23_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block23_1_relu[0][0] __________________________________________________________________________________________________ conv4_block23_concat (Concatena (None, 14, 14, 992) 0 conv4_block22_concat[0][0] conv4_block23_2_conv[0][0] __________________________________________________________________________________________________ conv4_block24_0_bn (BatchNormal (None, 14, 
14, 992) 3968 conv4_block23_concat[0][0] __________________________________________________________________________________________________ conv4_block24_0_relu (Activatio (None, 14, 14, 992) 0 conv4_block24_0_bn[0][0] __________________________________________________________________________________________________ conv4_block24_1_conv (Conv2D) (None, 14, 14, 128) 126976 conv4_block24_0_relu[0][0] __________________________________________________________________________________________________ conv4_block24_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block24_1_conv[0][0] __________________________________________________________________________________________________ conv4_block24_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block24_1_bn[0][0] __________________________________________________________________________________________________ conv4_block24_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block24_1_relu[0][0] __________________________________________________________________________________________________ conv4_block24_concat (Concatena (None, 14, 14, 1024) 0 conv4_block23_concat[0][0] conv4_block24_2_conv[0][0] __________________________________________________________________________________________________ pool4_bn (BatchNormalization) (None, 14, 14, 1024) 4096 conv4_block24_concat[0][0] __________________________________________________________________________________________________ pool4_relu (Activation) (None, 14, 14, 1024) 0 pool4_bn[0][0] __________________________________________________________________________________________________ pool4_conv (Conv2D) (None, 14, 14, 512) 524288 pool4_relu[0][0] __________________________________________________________________________________________________ pool4_pool (AveragePooling2D) (None, 7, 7, 512) 0 pool4_conv[0][0] __________________________________________________________________________________________________ conv5_block1_0_bn (BatchNormali (None, 7, 7, 512) 2048 pool4_pool[0][0] 
__________________________________________________________________________________________________ conv5_block1_0_relu (Activation (None, 7, 7, 512) 0 conv5_block1_0_bn[0][0] __________________________________________________________________________________________________ conv5_block1_1_conv (Conv2D) (None, 7, 7, 128) 65536 conv5_block1_0_relu[0][0] __________________________________________________________________________________________________ conv5_block1_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block1_1_conv[0][0] __________________________________________________________________________________________________ conv5_block1_1_relu (Activation (None, 7, 7, 128) 0 conv5_block1_1_bn[0][0] __________________________________________________________________________________________________ conv5_block1_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block1_1_relu[0][0] __________________________________________________________________________________________________ conv5_block1_concat (Concatenat (None, 7, 7, 544) 0 pool4_pool[0][0] conv5_block1_2_conv[0][0] __________________________________________________________________________________________________ conv5_block2_0_bn (BatchNormali (None, 7, 7, 544) 2176 conv5_block1_concat[0][0] __________________________________________________________________________________________________ conv5_block2_0_relu (Activation (None, 7, 7, 544) 0 conv5_block2_0_bn[0][0] __________________________________________________________________________________________________ conv5_block2_1_conv (Conv2D) (None, 7, 7, 128) 69632 conv5_block2_0_relu[0][0] __________________________________________________________________________________________________ conv5_block2_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block2_1_conv[0][0] __________________________________________________________________________________________________ conv5_block2_1_relu (Activation (None, 7, 7, 128) 0 conv5_block2_1_bn[0][0] 
__________________________________________________________________________________________________ conv5_block2_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block2_1_relu[0][0] __________________________________________________________________________________________________ conv5_block2_concat (Concatenat (None, 7, 7, 576) 0 conv5_block1_concat[0][0] conv5_block2_2_conv[0][0] __________________________________________________________________________________________________ conv5_block3_0_bn (BatchNormali (None, 7, 7, 576) 2304 conv5_block2_concat[0][0] __________________________________________________________________________________________________ conv5_block3_0_relu (Activation (None, 7, 7, 576) 0 conv5_block3_0_bn[0][0] __________________________________________________________________________________________________ conv5_block3_1_conv (Conv2D) (None, 7, 7, 128) 73728 conv5_block3_0_relu[0][0] __________________________________________________________________________________________________ conv5_block3_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block3_1_conv[0][0] __________________________________________________________________________________________________ conv5_block3_1_relu (Activation (None, 7, 7, 128) 0 conv5_block3_1_bn[0][0] __________________________________________________________________________________________________ conv5_block3_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block3_1_relu[0][0] __________________________________________________________________________________________________ conv5_block3_concat (Concatenat (None, 7, 7, 608) 0 conv5_block2_concat[0][0] conv5_block3_2_conv[0][0] __________________________________________________________________________________________________ conv5_block4_0_bn (BatchNormali (None, 7, 7, 608) 2432 conv5_block3_concat[0][0] __________________________________________________________________________________________________ conv5_block4_0_relu (Activation (None, 7, 7, 608) 0 
conv5_block4_0_bn[0][0] __________________________________________________________________________________________________ conv5_block4_1_conv (Conv2D) (None, 7, 7, 128) 77824 conv5_block4_0_relu[0][0] __________________________________________________________________________________________________ conv5_block4_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block4_1_conv[0][0] __________________________________________________________________________________________________ conv5_block4_1_relu (Activation (None, 7, 7, 128) 0 conv5_block4_1_bn[0][0] __________________________________________________________________________________________________ conv5_block4_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block4_1_relu[0][0] __________________________________________________________________________________________________ conv5_block4_concat (Concatenat (None, 7, 7, 640) 0 conv5_block3_concat[0][0] conv5_block4_2_conv[0][0] __________________________________________________________________________________________________ conv5_block5_0_bn (BatchNormali (None, 7, 7, 640) 2560 conv5_block4_concat[0][0] __________________________________________________________________________________________________ conv5_block5_0_relu (Activation (None, 7, 7, 640) 0 conv5_block5_0_bn[0][0] __________________________________________________________________________________________________ conv5_block5_1_conv (Conv2D) (None, 7, 7, 128) 81920 conv5_block5_0_relu[0][0] __________________________________________________________________________________________________ conv5_block5_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block5_1_conv[0][0] __________________________________________________________________________________________________ conv5_block5_1_relu (Activation (None, 7, 7, 128) 0 conv5_block5_1_bn[0][0] __________________________________________________________________________________________________ conv5_block5_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block5_1_relu[0][0] 
__________________________________________________________________________________________________ conv5_block5_concat (Concatenat (None, 7, 7, 672) 0 conv5_block4_concat[0][0] conv5_block5_2_conv[0][0] __________________________________________________________________________________________________ conv5_block6_0_bn (BatchNormali (None, 7, 7, 672) 2688 conv5_block5_concat[0][0] __________________________________________________________________________________________________ conv5_block6_0_relu (Activation (None, 7, 7, 672) 0 conv5_block6_0_bn[0][0] __________________________________________________________________________________________________ conv5_block6_1_conv (Conv2D) (None, 7, 7, 128) 86016 conv5_block6_0_relu[0][0] __________________________________________________________________________________________________ conv5_block6_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block6_1_conv[0][0] __________________________________________________________________________________________________ conv5_block6_1_relu (Activation (None, 7, 7, 128) 0 conv5_block6_1_bn[0][0] __________________________________________________________________________________________________ conv5_block6_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block6_1_relu[0][0] __________________________________________________________________________________________________ conv5_block6_concat (Concatenat (None, 7, 7, 704) 0 conv5_block5_concat[0][0] conv5_block6_2_conv[0][0] __________________________________________________________________________________________________ conv5_block7_0_bn (BatchNormali (None, 7, 7, 704) 2816 conv5_block6_concat[0][0] __________________________________________________________________________________________________ conv5_block7_0_relu (Activation (None, 7, 7, 704) 0 conv5_block7_0_bn[0][0] __________________________________________________________________________________________________ conv5_block7_1_conv (Conv2D) (None, 7, 7, 128) 90112 
conv5_block7_0_relu[0][0] __________________________________________________________________________________________________ conv5_block7_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block7_1_conv[0][0] __________________________________________________________________________________________________ conv5_block7_1_relu (Activation (None, 7, 7, 128) 0 conv5_block7_1_bn[0][0] __________________________________________________________________________________________________ conv5_block7_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block7_1_relu[0][0] __________________________________________________________________________________________________ conv5_block7_concat (Concatenat (None, 7, 7, 736) 0 conv5_block6_concat[0][0] conv5_block7_2_conv[0][0] __________________________________________________________________________________________________ conv5_block8_0_bn (BatchNormali (None, 7, 7, 736) 2944 conv5_block7_concat[0][0] __________________________________________________________________________________________________ conv5_block8_0_relu (Activation (None, 7, 7, 736) 0 conv5_block8_0_bn[0][0] __________________________________________________________________________________________________ conv5_block8_1_conv (Conv2D) (None, 7, 7, 128) 94208 conv5_block8_0_relu[0][0] __________________________________________________________________________________________________ conv5_block8_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block8_1_conv[0][0] __________________________________________________________________________________________________ conv5_block8_1_relu (Activation (None, 7, 7, 128) 0 conv5_block8_1_bn[0][0] __________________________________________________________________________________________________ conv5_block8_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block8_1_relu[0][0] __________________________________________________________________________________________________ conv5_block8_concat (Concatenat (None, 7, 7, 768) 0 
conv5_block7_concat[0][0] conv5_block8_2_conv[0][0] __________________________________________________________________________________________________ conv5_block9_0_bn (BatchNormali (None, 7, 7, 768) 3072 conv5_block8_concat[0][0] __________________________________________________________________________________________________ conv5_block9_0_relu (Activation (None, 7, 7, 768) 0 conv5_block9_0_bn[0][0] __________________________________________________________________________________________________ conv5_block9_1_conv (Conv2D) (None, 7, 7, 128) 98304 conv5_block9_0_relu[0][0] __________________________________________________________________________________________________ conv5_block9_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block9_1_conv[0][0] __________________________________________________________________________________________________ conv5_block9_1_relu (Activation (None, 7, 7, 128) 0 conv5_block9_1_bn[0][0] __________________________________________________________________________________________________ conv5_block9_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block9_1_relu[0][0] __________________________________________________________________________________________________ conv5_block9_concat (Concatenat (None, 7, 7, 800) 0 conv5_block8_concat[0][0] conv5_block9_2_conv[0][0] __________________________________________________________________________________________________ conv5_block10_0_bn (BatchNormal (None, 7, 7, 800) 3200 conv5_block9_concat[0][0] __________________________________________________________________________________________________ conv5_block10_0_relu (Activatio (None, 7, 7, 800) 0 conv5_block10_0_bn[0][0] __________________________________________________________________________________________________ conv5_block10_1_conv (Conv2D) (None, 7, 7, 128) 102400 conv5_block10_0_relu[0][0] __________________________________________________________________________________________________ conv5_block10_1_bn (BatchNormal (None, 7, 7, 
128) 512 conv5_block10_1_conv[0][0] __________________________________________________________________________________________________ conv5_block10_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block10_1_bn[0][0] __________________________________________________________________________________________________ conv5_block10_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block10_1_relu[0][0] __________________________________________________________________________________________________ conv5_block10_concat (Concatena (None, 7, 7, 832) 0 conv5_block9_concat[0][0] conv5_block10_2_conv[0][0] __________________________________________________________________________________________________ conv5_block11_0_bn (BatchNormal (None, 7, 7, 832) 3328 conv5_block10_concat[0][0] __________________________________________________________________________________________________ conv5_block11_0_relu (Activatio (None, 7, 7, 832) 0 conv5_block11_0_bn[0][0] __________________________________________________________________________________________________ conv5_block11_1_conv (Conv2D) (None, 7, 7, 128) 106496 conv5_block11_0_relu[0][0] __________________________________________________________________________________________________ conv5_block11_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block11_1_conv[0][0] __________________________________________________________________________________________________ conv5_block11_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block11_1_bn[0][0] __________________________________________________________________________________________________ conv5_block11_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block11_1_relu[0][0] __________________________________________________________________________________________________ conv5_block11_concat (Concatena (None, 7, 7, 864) 0 conv5_block10_concat[0][0] conv5_block11_2_conv[0][0] __________________________________________________________________________________________________ conv5_block12_0_bn 
(BatchNormal (None, 7, 7, 864) 3456 conv5_block11_concat[0][0] __________________________________________________________________________________________________ conv5_block12_0_relu (Activatio (None, 7, 7, 864) 0 conv5_block12_0_bn[0][0] __________________________________________________________________________________________________ conv5_block12_1_conv (Conv2D) (None, 7, 7, 128) 110592 conv5_block12_0_relu[0][0] __________________________________________________________________________________________________ conv5_block12_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block12_1_conv[0][0] __________________________________________________________________________________________________ conv5_block12_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block12_1_bn[0][0] __________________________________________________________________________________________________ conv5_block12_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block12_1_relu[0][0] __________________________________________________________________________________________________ conv5_block12_concat (Concatena (None, 7, 7, 896) 0 conv5_block11_concat[0][0] conv5_block12_2_conv[0][0] __________________________________________________________________________________________________ conv5_block13_0_bn (BatchNormal (None, 7, 7, 896) 3584 conv5_block12_concat[0][0] __________________________________________________________________________________________________ conv5_block13_0_relu (Activatio (None, 7, 7, 896) 0 conv5_block13_0_bn[0][0] __________________________________________________________________________________________________ conv5_block13_1_conv (Conv2D) (None, 7, 7, 128) 114688 conv5_block13_0_relu[0][0] __________________________________________________________________________________________________ conv5_block13_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block13_1_conv[0][0] __________________________________________________________________________________________________ conv5_block13_1_relu 
(Activatio (None, 7, 7, 128) 0 conv5_block13_1_bn[0][0] __________________________________________________________________________________________________ conv5_block13_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block13_1_relu[0][0] __________________________________________________________________________________________________ conv5_block13_concat (Concatena (None, 7, 7, 928) 0 conv5_block12_concat[0][0] conv5_block13_2_conv[0][0] __________________________________________________________________________________________________ conv5_block14_0_bn (BatchNormal (None, 7, 7, 928) 3712 conv5_block13_concat[0][0] __________________________________________________________________________________________________ conv5_block14_0_relu (Activatio (None, 7, 7, 928) 0 conv5_block14_0_bn[0][0] __________________________________________________________________________________________________ conv5_block14_1_conv (Conv2D) (None, 7, 7, 128) 118784 conv5_block14_0_relu[0][0] __________________________________________________________________________________________________ conv5_block14_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block14_1_conv[0][0] __________________________________________________________________________________________________ conv5_block14_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block14_1_bn[0][0] __________________________________________________________________________________________________ conv5_block14_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block14_1_relu[0][0] __________________________________________________________________________________________________ conv5_block14_concat (Concatena (None, 7, 7, 960) 0 conv5_block13_concat[0][0] conv5_block14_2_conv[0][0] __________________________________________________________________________________________________ conv5_block15_0_bn (BatchNormal (None, 7, 7, 960) 3840 conv5_block14_concat[0][0] __________________________________________________________________________________________________ 
conv5_block15_0_relu (Activatio (None, 7, 7, 960) 0 conv5_block15_0_bn[0][0] __________________________________________________________________________________________________ conv5_block15_1_conv (Conv2D) (None, 7, 7, 128) 122880 conv5_block15_0_relu[0][0] __________________________________________________________________________________________________ conv5_block15_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block15_1_conv[0][0] __________________________________________________________________________________________________ conv5_block15_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block15_1_bn[0][0] __________________________________________________________________________________________________ conv5_block15_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block15_1_relu[0][0] __________________________________________________________________________________________________ conv5_block15_concat (Concatena (None, 7, 7, 992) 0 conv5_block14_concat[0][0] conv5_block15_2_conv[0][0] __________________________________________________________________________________________________ conv5_block16_0_bn (BatchNormal (None, 7, 7, 992) 3968 conv5_block15_concat[0][0] __________________________________________________________________________________________________ conv5_block16_0_relu (Activatio (None, 7, 7, 992) 0 conv5_block16_0_bn[0][0] __________________________________________________________________________________________________ conv5_block16_1_conv (Conv2D) (None, 7, 7, 128) 126976 conv5_block16_0_relu[0][0] __________________________________________________________________________________________________ conv5_block16_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block16_1_conv[0][0] __________________________________________________________________________________________________ conv5_block16_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block16_1_bn[0][0] __________________________________________________________________________________________________ 
conv5_block16_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block16_1_relu[0][0] __________________________________________________________________________________________________ conv5_block16_concat (Concatena (None, 7, 7, 1024) 0 conv5_block15_concat[0][0] conv5_block16_2_conv[0][0] __________________________________________________________________________________________________ bn (BatchNormalization) (None, 7, 7, 1024) 4096 conv5_block16_concat[0][0] __________________________________________________________________________________________________ relu (Activation) (None, 7, 7, 1024) 0 bn[0][0] __________________________________________________________________________________________________ global_average_pooling2d (Globa (None, 1024) 0 relu[0][0] __________________________________________________________________________________________________ chexnet_output (Dense) (None, 14) 14350 global_average_pooling2d[0][0] ================================================================================================== Total params: 7,051,854 Trainable params: 6,968,206 Non-trainable params: 83,648 __________________________________________________________________________________________________
Atelectasis,Cardiomegaly,Effusion,Infiltration,Mass,Nodule,Pneumonia,Pneumothorax,Consolidation,Edema,Emphysema,Fibrosis,Pleural_Thickening,Hernia
# Load the curated NIH chest X-ray label sheet: one row per image with its
# finding label, follow-up number, patient metadata and acquisition geometry.
df = pd.read_csv(labels_file)
df
Unnamed: 0 | Image Index | Finding Labels | Follow-up # | Patient ID | Patient Age | Patient Gender | View Position | OriginalImage[Width | Height] | OriginalImagePixelSpacing[x | y] | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 0 | 00000001_000.png | Cardiomegaly | 0 | 1 | 57 | M | PA | 2682 | 2749 | 0.143 | 0.143 |
1 | 4 | 00000003_001.png | Hernia | 0 | 3 | 74 | F | PA | 2500 | 2048 | 0.168 | 0.168 |
2 | 5 | 00000003_002.png | Hernia | 1 | 3 | 75 | F | PA | 2048 | 2500 | 0.168 | 0.168 |
3 | 7 | 00000003_004.png | Hernia | 3 | 3 | 77 | F | PA | 2500 | 2048 | 0.168 | 0.168 |
4 | 8 | 00000003_005.png | Hernia | 4 | 3 | 78 | F | PA | 2686 | 2991 | 0.143 | 0.143 |
... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
1416 | 4978 | 00001330_002.png | Effusion | 2 | 1330 | 75 | M | AP | 3056 | 2544 | 0.139 | 0.139 |
1417 | 4979 | 00001330_003.png | Infiltration | 3 | 1330 | 75 | M | PA | 2992 | 2991 | 0.143 | 0.143 |
1418 | 4980 | 00001330_004.png | Effusion | 4 | 1330 | 75 | M | AP | 3056 | 2544 | 0.139 | 0.139 |
1419 | 4986 | 00001333_000.png | Nodule | 0 | 1333 | 47 | M | PA | 2500 | 2048 | 0.171 | 0.171 |
1420 | 4998 | 00001335_006.png | Infiltration | 6 | 1335 | 23 | F | PA | 2992 | 2729 | 0.143 | 0.143 |
1421 rows × 12 columns
# Visualize how the finding labels are distributed in the full dataset.
fig = plt.figure(figsize=(30, 4))
ax = sns.countplot(x="Finding Labels", data=df)
ax.set_title('Categorical distribution in the original dataset')
plt.tight_layout()
# Build a class-balanced training subset: 14 randomly sampled rows per
# finding class, concatenated into one frame.
# NOTE(review): .sample() is unseeded, so the subset changes between runs —
# pass random_state=... if reproducibility matters.
training_df_list = [
    df[df['Finding Labels'] == cname].sample(14) for cname in class_name
]
train_df = pd.concat(training_df_list)
train_df
Unnamed: 0 | Image Index | Finding Labels | Follow-up # | Patient ID | Patient Age | Patient Gender | View Position | OriginalImage[Width | Height] | OriginalImagePixelSpacing[x | y] | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
691 | 2217 | 00000583_009.png | Atelectasis | 7 | 583 | 40 | F | AP | 2500 | 2048 | 0.168000 | 0.168000 |
15 | 34 | 00000011_006.png | Atelectasis | 6 | 11 | 75 | M | PA | 2992 | 2991 | 0.143000 | 0.143000 |
1184 | 4107 | 00001108_001.png | Atelectasis | 2 | 1108 | 59 | M | PA | 2020 | 2021 | 0.194311 | 0.194311 |
453 | 1396 | 00000368_004.png | Atelectasis | 0 | 368 | 35 | M | PA | 2992 | 2991 | 0.143000 | 0.143000 |
1177 | 4076 | 00001104_008.png | Atelectasis | 8 | 1104 | 54 | M | PA | 2500 | 2048 | 0.168000 | 0.168000 |
... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
6 | 10 | 00000003_007.png | Hernia | 6 | 3 | 80 | F | PA | 2582 | 2905 | 0.143000 | 0.143000 |
1012 | 3491 | 00000942_001.png | Hernia | 1 | 942 | 48 | F | PA | 2048 | 2500 | 0.171000 | 0.171000 |
385 | 1158 | 00000284_000.png | Hernia | 5 | 284 | 86 | F | PA | 2992 | 2991 | 0.143000 | 0.143000 |
383 | 1154 | 00000284_002.png | Hernia | 1 | 284 | 81 | F | PA | 2500 | 2048 | 0.168000 | 0.168000 |
4 | 8 | 00000003_005.png | Hernia | 4 | 3 | 78 | F | PA | 2686 | 2991 | 0.143000 | 0.143000 |
196 rows × 12 columns
# Visualize the (now balanced) label distribution of the training subset.
fig = plt.figure(figsize=(30, 4))
ax = sns.countplot(x="Finding Labels", data=train_df)
ax.set_title('Categorical distribution in the training dataset')
plt.tight_layout()
# Use tf.keras, consistent with the rest of the file's imports.
from tensorflow.keras.optimizers import Adam
from sklearn.metrics import classification_report, confusion_matrix

# A full-length sample without replacement is a random permutation — this
# shuffles the balanced training rows.
indexes = random.sample(range(len(train_df)), len(train_df))
name = train_df['Image Index'].iloc[indexes]
true_label = train_df['Finding Labels'].iloc[indexes]
# Re-declared so this cell is self-contained (same value as at notebook top).
class_name = "Atelectasis,Cardiomegaly,Effusion,Infiltration,Mass,Nodule,Pneumonia,Pneumothorax,Consolidation,Edema,Emphysema,Fibrosis,Pleural_Thickening,Hernia".split(',')

# One-hot encode the ground-truth labels, ordered like class_name.
gr_truth = np.array([
    [1 if cname == label else 0 for cname in class_name]
    for label in true_label
])

# Load every training image, normalize to [0, 1] and resize to the
# 224x224 ChexNet input resolution.
img_data = np.array([
    cv2.resize(cv2.imread(image_folder + img_name) / 255, (224, 224),
               interpolation=cv2.INTER_NEAREST)
    for img_name in name
])

opt = Adam(learning_rate=1e-4)  # `lr` is a deprecated alias of `learning_rate`
# NOTE(review): MSE trains, but 'categorical_crossentropy' is the conventional
# loss for a softmax multi-class head — consider switching.
model.compile(optimizer=opt, loss='mse', metrics=['accuracy'])
history = model.fit(img_data, gr_truth, epochs=60, batch_size=8)
Epoch 1/60 25/25 [==============================] - 12s 106ms/step - loss: 0.1161 - accuracy: 0.0816 Epoch 2/60 25/25 [==============================] - 3s 103ms/step - loss: 0.0997 - accuracy: 0.1224 Epoch 3/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0884 - accuracy: 0.1480 Epoch 4/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0791 - accuracy: 0.1786 Epoch 5/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0739 - accuracy: 0.1837 Epoch 6/60 25/25 [==============================] - 3s 106ms/step - loss: 0.0690 - accuracy: 0.2398 Epoch 7/60 25/25 [==============================] - 3s 106ms/step - loss: 0.0662 - accuracy: 0.2449 Epoch 8/60 25/25 [==============================] - 3s 106ms/step - loss: 0.0635 - accuracy: 0.2959 Epoch 9/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0600 - accuracy: 0.4031 Epoch 10/60 25/25 [==============================] - 3s 106ms/step - loss: 0.0588 - accuracy: 0.3980 Epoch 11/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0572 - accuracy: 0.4133 Epoch 12/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0558 - accuracy: 0.4235 Epoch 13/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0547 - accuracy: 0.4184 Epoch 14/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0531 - accuracy: 0.4745 Epoch 15/60 25/25 [==============================] - 3s 106ms/step - loss: 0.0524 - accuracy: 0.4847 Epoch 16/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0503 - accuracy: 0.4949 Epoch 17/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0487 - accuracy: 0.5357 Epoch 18/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0467 - accuracy: 0.5459 Epoch 19/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0455 - accuracy: 0.5561 Epoch 20/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0434 - 
accuracy: 0.5816 Epoch 21/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0418 - accuracy: 0.5663 Epoch 22/60 25/25 [==============================] - 3s 103ms/step - loss: 0.0419 - accuracy: 0.5969 Epoch 23/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0387 - accuracy: 0.6224 Epoch 24/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0374 - accuracy: 0.6633 Epoch 25/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0344 - accuracy: 0.7143 Epoch 26/60 25/25 [==============================] - 3s 103ms/step - loss: 0.0319 - accuracy: 0.7143 Epoch 27/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0305 - accuracy: 0.7347 Epoch 28/60 25/25 [==============================] - 3s 103ms/step - loss: 0.0295 - accuracy: 0.7500 Epoch 29/60 25/25 [==============================] - 3s 103ms/step - loss: 0.0282 - accuracy: 0.7704 Epoch 30/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0254 - accuracy: 0.7959 Epoch 31/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0227 - accuracy: 0.8163 Epoch 32/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0208 - accuracy: 0.8265 Epoch 33/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0187 - accuracy: 0.8520 Epoch 34/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0184 - accuracy: 0.8724 Epoch 35/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0152 - accuracy: 0.8980 Epoch 36/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0138 - accuracy: 0.9082 Epoch 37/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0125 - accuracy: 0.9133 Epoch 38/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0110 - accuracy: 0.9184 Epoch 39/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0113 - accuracy: 0.9184 Epoch 40/60 25/25 [==============================] - 3s 
104ms/step - loss: 0.0092 - accuracy: 0.9286 Epoch 41/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0091 - accuracy: 0.9235 Epoch 42/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0091 - accuracy: 0.9235 Epoch 43/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0089 - accuracy: 0.9235 Epoch 44/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0080 - accuracy: 0.9286 Epoch 45/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0075 - accuracy: 0.9286 Epoch 46/60 25/25 [==============================] - 3s 106ms/step - loss: 0.0075 - accuracy: 0.9286 Epoch 47/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0073 - accuracy: 0.9286 Epoch 48/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0071 - accuracy: 0.9286 Epoch 49/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0068 - accuracy: 0.9286 Epoch 50/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0070 - accuracy: 0.9286 Epoch 51/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0084 - accuracy: 0.9082 Epoch 52/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0066 - accuracy: 0.9286 Epoch 53/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0067 - accuracy: 0.9286 Epoch 54/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0071 - accuracy: 0.9286 Epoch 55/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0068 - accuracy: 0.9286 Epoch 56/60 25/25 [==============================] - 3s 106ms/step - loss: 0.0067 - accuracy: 0.9286 Epoch 57/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0067 - accuracy: 0.9286 Epoch 58/60 25/25 [==============================] - 3s 104ms/step - loss: 0.0070 - accuracy: 0.9286 Epoch 59/60 25/25 [==============================] - 3s 105ms/step - loss: 0.0066 - accuracy: 0.9286 Epoch 60/60 25/25 
[==============================] - 3s 104ms/step - loss: 0.0069 - accuracy: 0.9286
# Plot the training-accuracy curve recorded by model.fit.
acc_curve = history.history['accuracy']
plt.plot(acc_curve)
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train'], loc='upper left')
plt.show()
# Persist the fine-tuned weights back to Drive.
model.save_weights('/content/drive/My Drive/Medical image Reporting/ChexNet weights/chexnet_retrained-14x14.h5')
# Display 10 random scans from the full dataset with their true labels.
indexes = random.sample(range(len(df)), 10)
name = df['Image Index'].iloc[indexes]
true_label = df['Finding Labels'].iloc[indexes]
fig = plt.figure(figsize=(30, 12))
columns = 5
rows = 2
# Subplot indices are 1-based, hence enumerate(..., start=1).
for plot_i, (img_name, label) in enumerate(zip(name, true_label), start=1):
    test_img = cv2.imread(image_folder + img_name, cv2.IMREAD_UNCHANGED) / 255
    test_img = cv2.resize(test_img, (224, 224), interpolation=cv2.INTER_NEAREST)
    fig.add_subplot(rows, columns, plot_i)
    plt.imshow(test_img, cmap='gray')
    plt.text(0, -8, label, ha='left', fontsize=20)
    plt.axis('off')
plt.tight_layout()
# Pick one random training example for the single-image demo.
# random.randint(0, len(train_df)) is inclusive at BOTH ends, so it could
# return len(train_df) and make .iloc raise IndexError; randrange samples
# from the half-open range [0, len) and is always safe.
index = random.randrange(len(train_df))
name = train_df['Image Index'].iloc[index]
true_label = train_df['Finding Labels'].iloc[index]
print(name, true_label)
00001104_008.png Atelectasis
# !ls '/content/drive/My Drive/Medical image Reporting/data/images'
def get_image_tensor(name, label=None, plot=False):
    """
    Load a chest X-ray by filename and return a ChexNet-ready tensor.

    Args:
        name: image filename relative to `image_folder`.
        label: optional true label, shown as the plot title.
        plot: if True, also display the scan in grayscale.

    Returns:
        A tensor of shape (1, 224, 224, 3), values in [0, 1].
    """
    # assumes the file decodes to a single-channel image — TODO confirm
    test_img = cv2.imread(image_folder + name, cv2.IMREAD_UNCHANGED) / 255
    # Replicate the gray channel three times so the RGB-expecting
    # DenseNet backbone can consume the scan.
    test_img = np.stack([test_img, test_img, test_img], axis=2)
    image1 = tf.expand_dims(
        cv2.resize(test_img, (224, 224), interpolation=cv2.INTER_NEAREST),
        axis=0)
    if plot:
        fig = plt.figure(figsize=(8, 8))
        plt.imshow(test_img[:, :, -1], cmap='gray')
        plt.axis('off')
        plt.tight_layout()
        if label is not None:  # identity check, not `!=`, per PEP 8
            plt.title(f"True value: {label}")
    return image1
# Sanity-check the pipeline on the sampled image: display it and run the
# fine-tuned model on it.
image = get_image_tensor(name, label=true_label, plot=True)
predict_chexnet(image, model, plot=True)
'Atelectasis 0.97'
# Show 10 random training scans with true label vs model prediction.
indexes = random.sample(range(len(train_df)), 10)
name = train_df['Image Index'].iloc[indexes]
true_label = train_df['Finding Labels'].iloc[indexes]
fig = plt.figure(figsize=(30, 12))
columns = 5
rows = 2
# Subplot indices are 1-based, hence enumerate(..., start=1).
for plot_i, (img_name, label) in enumerate(zip(name, true_label), start=1):
    test_img = cv2.imread(image_folder + img_name) / 255
    image1 = tf.expand_dims(
        cv2.resize(test_img, (224, 224), interpolation=cv2.INTER_NEAREST),
        axis=0)
    p = predict_chexnet(image1, model, plot=False)
    # The output predicted by the ChexNet model
    fig.add_subplot(rows, columns, plot_i)
    # NOTE(review): test_img is 3-channel here, so cmap='gray' is ignored.
    plt.imshow(test_img, cmap='gray')
    plt.text(0, -8, f"real:{label} | pred:{p}", ha='left', fontsize=14)
    plt.axis('off')
plt.tight_layout()
# tf.keras backend, consistent with the rest of the file's imports.
from tensorflow.keras import backend as kb

# Pick one random training example to explain with Grad-CAM.
# random.randint(0, len(train_df)) is inclusive at both ends and could index
# one past the end of the frame; randrange samples from [0, len).
index = random.randrange(len(train_df))
name = train_df['Image Index'].iloc[index]
true_label = train_df['Finding Labels'].iloc[index]
print(name, true_label)
00000305_001.png Atelectasis
# Load the selected scan, normalize to [0, 1], replicate the single gray
# channel to 3 (assumes the file decodes single-channel — TODO confirm),
# resize to the 224x224 ChexNet input and add a batch axis.
img_array = cv2.imread(image_folder + name, cv2.IMREAD_UNCHANGED) / 255
img_array = np.stack([img_array, img_array, img_array], axis=2)
img_array = cv2.resize(img_array, (224, 224), interpolation=cv2.INTER_NEAREST)
array = np.expand_dims(img_array, axis=0)
def get_output_layer(model, layer_name):
    """
    Return the layer of `model` whose name is `layer_name`.

    Layer names are unique within a Keras model, so a simple dict lookup
    suffices. Raises KeyError if no layer has that name.
    """
    layer_dict = {layer.name: layer for layer in model.layers}
    return layer_dict[layer_name]
# ---- Grad-CAM (class activation map) for the selected image ----
# Weights of the final Dense layer: one column of channel weights per class
# (rows = conv channels, columns = classes).
class_weights = model.layers[-1].get_weights()[0]
final_conv_layer = get_output_layer(model, "bn")
# Backend function mapping an input batch to (last conv features, class scores).
get_output = kb.function([model.layers[0].input], [final_conv_layer.output, model.layers[-1].output])
[conv_outputs, predictions] = get_output(array)
conv_outputs = conv_outputs[0, :, :, :]  # drop the batch axis
# Create the class activation map.
cam = np.zeros(dtype=np.float32, shape=(conv_outputs.shape[:2]))
for i in range(class_weights.shape[0]):
    # Weight channel i by its connection to the argmax (predicted) class.
    w = class_weights[i,np.argmax(predictions)]
    cam += w * conv_outputs[:, :, i]
# print(f"predictions: {predictions}")
cam /= np.max(cam)  # scale so the strongest activation is 1
_cam = cv2.resize(cam, (224,224))  # upsample the coarse map to image size
# Colorize with the jet colormap (keep RGB, drop alpha).
jet = cm.get_cmap("jet")
jet_colors = jet(np.arange(256))[:, :3]
jet_heatmap = jet_colors[np.uint8(255*_cam)]
jet_heatmap[np.where(_cam < 0.1)] = 0  # mask out weak activations
cam_img = jet_heatmap * 0.3 + img_array * 0.7  # blend heatmap over the scan
# Show the raw CAM, the upsampled/masked CAM and the colorized heatmap.
fig, axs = plt.subplots(1, 3, figsize=(24, 8))
axs[0].imshow(cam)
axs[0].set_title('channel activation layer weights')
axs[0].axis('off')
axs[1].imshow(_cam)
axs[1].axis('off')
axs[1].set_title('scaled and smoothed weights')  # typo fix: "smoothened"
axs[2].imshow(jet_heatmap)
axs[2].axis('off')
axs[2].set_title('Heatmap of probability')  # typo fix: "probality"
plt.tight_layout()
# Display Grad CAM: original scan (left) vs heatmap overlay (right).
fig, axs = plt.subplots(1, 2, figsize=(22, 10))
axs[1].imshow(cam_img)
axs[1].axis('off')
axs[0].imshow(img_array[:, :, 0], cmap='gray')
axs[0].axis('off')
plt.tight_layout()
# Grad-CAM overlays for 10 random training images. This repeats the
# single-image recipe above per image (with a slightly higher heatmap
# cut-off of 0.2 instead of 0.1) — candidate for extraction into a helper.
indexes = random.sample(range(len(train_df)), 10)
name = train_df['Image Index'].iloc[indexes]
true_label = train_df['Finding Labels'].iloc[indexes]
fig = plt.figure(figsize=(30,12))
columns = 5
rows = 2
for plot_i in range(1, len(indexes)+1):
    # Load and normalize the scan; prepare the display image and the
    # batched model input.
    test_img = cv2.imread(image_folder+name.iloc[plot_i-1])/255
    img_array = cv2.resize(test_img,(224,224),interpolation = cv2.INTER_NEAREST)
    array = np.expand_dims(img_array, axis=0)
    image1 = tf.expand_dims(cv2.resize(test_img,(224,224),interpolation = cv2.INTER_NEAREST),axis=0)
    p = predict_chexnet(image1,model,plot=False)
    # Final Dense layer weights: rows = conv channels, columns = classes.
    class_weights = model.layers[-1].get_weights()[0]
    final_conv_layer = get_output_layer(model, "bn")
    # Backend function: input batch -> (last conv features, class scores).
    get_output = kb.function([model.layers[0].input], [final_conv_layer.output, model.layers[-1].output])
    [conv_outputs, predictions] = get_output(array)
    conv_outputs = conv_outputs[0, :, :, :]  # drop the batch axis
    # Create the class activation map.
    cam = np.zeros(dtype=np.float32, shape=(conv_outputs.shape[:2]))
    for i in range(class_weights.shape[0]):
        # Weight channel i by its connection to the predicted class.
        w = class_weights[i,np.argmax(predictions)]
        cam += w * conv_outputs[:, :, i]
    # print(f"predictions: {predictions}")
    cam /= np.max(cam)  # scale so the strongest activation is 1
    _cam = cv2.resize(cam, (224,224))
    jet = cm.get_cmap("jet")
    jet_colors = jet(np.arange(256))[:, :3]
    jet_heatmap = jet_colors[np.uint8(255*_cam)]
    jet_heatmap[np.where(_cam < 0.2)] = 0  # mask out weak activations
    cam_img = jet_heatmap * 0.3 + img_array * 0.7  # blend over the scan
    fig.add_subplot(rows, columns, plot_i)
    plt.imshow(cam_img)
    plt.text(0,-8, f"real:{true_label.iloc[plot_i-1]} | pred:{p}", ha='left', fontsize=12)
    plt.axis('off')
plt.tight_layout()
Comparison: two view chest from
Indication: year old male with
Findings: heart size within normal limits . mild hyperinflation of the lungs . mild pectus excavatum deformity . stable left mid lung calcified granuloma . no focal airspace disease . no pneumothorax or effusions .
Impression: changes of chronic lung disease with no acute cardiopulmonary finding .
Comparison: none .
Indication: chest ray positvie tb . language unable to get more hx
Findings: nan
Impression: normal heart size . normal pulmonary vasculature . normal mediastinal contours . lung parenchyma is clear . no airspace disease . no pulmonary edema . no of pleural effusions . no of active tuberculosis . no of active cardiopulmonary disease .
Comparison: none
Indication: chest radiograph prior to initiation of medication
Findings: the lungs appear clear . the heart and pulmonary are normal . mediastinal contours are normal . the pleural spaces are clear .
Impression: no acute cardiopulmonary disease .
class Image_encoder(tf.keras.layers.Layer):
    """
    Frozen ChexNet backbone that emits flattened spatial image features.

    Input : image batch of shape (None, 224, 224, 3)
    Output: backbone features reshaped to (None, 9, 1024)
    """
    def __init__(self, name="image_encoder_block"):
        super().__init__()
        # Second return value of create_chexnet is the headless backbone.
        _, self.chexnet = create_chexnet(input_size=(224, 224))
        self.chexnet.trainable = False  # backbone weights stay frozen
        self.avgpool = AveragePooling2D()

    def call(self, data):
        features = self.chexnet(data)      # (None, 7, 7, 1024)
        features = self.avgpool(features)  # (None, 3, 3, 1024)
        flat_shape = (-1, features.shape[1] * features.shape[2], features.shape[3])
        return tf.reshape(features, shape=flat_shape)  # (None, 9, 1024)
def encoder(image1, image2, dense_dim, dropout_rate):
    """
    Encode a pair of x-ray images into a single feature sequence.

    Both images go through the same Image_encoder and the same projection
    Dense layer; their (None, 9, dense_dim) features are concatenated along
    the sequence axis, then batch-normalized and dropped out.

    Returns (encoder output, raw image2 backbone features,
             projected image2 features).
    """
    shared_encoder = Image_encoder()
    feat1 = shared_encoder(image1)  # (None, 9, 1024)
    print(feat1.shape, 'image1 after image encoder')

    shared_dense = Dense(dense_dim, name='bkdense', activation='relu')
    feat1 = shared_dense(feat1)  # (None, 9, dense_dim)
    print(feat1.shape, 'image encoder output from bk_dense')

    raw_feat2 = shared_encoder(image2)  # (None, 9, 1024)
    feat2 = shared_dense(raw_feat2)     # (None, 9, dense_dim)

    merged = Concatenate(axis=1)([feat1, feat2])  # (None, 18, dense_dim)
    normed = BatchNormalization(name="encoder_batch_norm")(merged)
    regularized = Dropout(dropout_rate, name="encoder_dropout")(normed)
    print(regularized.shape, 'encoder output')
    return regularized, raw_feat2, feat2
class global_attention(tf.keras.layers.Layer):
    """
    Additive (Bahdanau-style) attention over the encoder feature sequence.

    score  = V(tanh(W1 @ enc + W2 @ dec_h))
    alphas = softmax(score, axis=1)
    """
    def __init__(self, dense_dim):
        super().__init__()
        self.W1 = Dense(units=dense_dim)  # projects encoder features
        self.W2 = Dense(units=dense_dim)  # projects decoder hidden state
        self.V = Dense(units=1)           # collapses each position to one score

    def call(self, encoder_output, decoder_h):
        # encoder_output: (batch, seq, dense_dim); decoder_h: (batch, dense_dim)
        hidden_with_time = tf.expand_dims(decoder_h, axis=1)  # (batch, 1, dense_dim)
        scores = self.V(tf.nn.tanh(self.W1(encoder_output) + self.W2(hidden_with_time)))
        alphas = tf.nn.softmax(scores, axis=1)  # (batch, seq, 1)
        # Attention-weighted sum over the sequence axis.
        context = tf.reduce_sum(alphas * encoder_output, axis=1)  # (batch, dense_dim)
        return context, alphas
The decoder below maintains hidden states and latent states across timesteps.
class One_Step_Decoder(tf.keras.layers.Layer):
    """
    Decode a single caption token from the previous token, the encoder
    output, and the previous decoder hidden state.
    """
    def __init__(self, vocab_size, embedding_dim, max_pad, dense_dim, name="onestepdecoder"):
        super().__init__()
        self.dense_dim = dense_dim
        self.embedding = Embedding(input_dim=vocab_size + 1,
                                   output_dim=embedding_dim,
                                   input_length=max_pad,
                                   mask_zero=True,  # token id 0 (padding) is masked
                                   name='onestepdecoder_embedding')
        # Despite the attribute name this is a GRU (single hidden state, no cell state).
        self.LSTM = GRU(units=self.dense_dim,
                        return_state=True,
                        name='onestepdecoder_LSTM')
        self.attention = global_attention(dense_dim=dense_dim)
        # FIX: self.concat was previously assigned twice with identical
        # Concatenate(axis=-1) layers; the redundant assignment is removed.
        self.concat = Concatenate(axis=-1)
        # NOTE(review): self.dense and self.add are never used in call();
        # kept to avoid perturbing layer bookkeeping — candidates for removal.
        self.dense = Dense(dense_dim, name='onestepdecoder_embedding_dense', activation='relu')
        self.final = Dense(vocab_size + 1, activation='softmax')
        self.add = Add()

    @tf.function
    def call(self, input_to_decoder, encoder_output, decoder_h):
        '''
        One decoding step:
        A. Embed the input token                        -> (batch, 1, embedding_dim)
        B. Attend over encoder_output with decoder_h    -> context vector
        C. Concatenate the context with the embedding
        D. Run one GRU step (teacher forcing)           -> output + new hidden state
        E. Project the output to vocabulary probabilities (softmax)

        Returns (token probabilities, new decoder hidden state, attention weights).
        '''
        embedding_op = self.embedding(input_to_decoder)  # (batch, 1, embedding_dim)
        context_vector, attention_weights = self.attention(encoder_output, decoder_h)
        # Add a time axis so the context can be concatenated with the embedding.
        context_vector_time_axis = tf.expand_dims(context_vector, axis=1)
        concat_input = self.concat([context_vector_time_axis, embedding_op])
        # output: (batch, dense_dim); decoder_h: (batch, dense_dim)
        output, decoder_h = self.LSTM(concat_input, initial_state=decoder_h)
        output = self.final(output)  # (batch, vocab_size + 1)
        return output, decoder_h, attention_weights
class decoder(tf.keras.Model):
    """
    Decodes the encoder output and caption
    """
    def __init__(self,max_pad, embedding_dim,dense_dim,batch_size ,vocab_size):
        super().__init__()
        self.onestepdecoder = One_Step_Decoder(vocab_size = vocab_size, embedding_dim = embedding_dim, max_pad = max_pad, dense_dim = dense_dim)
        # NOTE(review): this TensorArray is replaced inside call(); the
        # attribute only carries the result of the most recent call.
        self.output_array = tf.TensorArray(tf.float32,size=max_pad)
        self.max_pad = max_pad
        self.batch_size = batch_size
        self.dense_dim =dense_dim

    @tf.function
    def call(self,encoder_output,caption):#,decoder_h,decoder_c): #caption : (None,max_pad), encoder_output: (None,dense_dim)
        # Zero-initialize the hidden state. decoder_c is created but never
        # used — the underlying cell is a GRU, which has no cell state.
        decoder_h, decoder_c = tf.zeros_like(encoder_output[:,0]), tf.zeros_like(encoder_output[:,0]) #decoder_h, decoder_c
        output_array = tf.TensorArray(tf.float32, size=self.max_pad)
        # Teacher forcing: the ground-truth caption token is fed at each step.
        for timestep in range(self.max_pad): #iterating through all timesteps ie through max_pad
            output,decoder_h,attention_weights = self.onestepdecoder(caption[:,timestep:timestep+1], encoder_output, decoder_h)
            output_array = output_array.write(timestep,output) #timestep*batch_size*vocab_size
        self.output_array = tf.transpose(output_array.stack(),[1,0,2]) #.stack :Return the values in the TensorArray as a stacked Tensor.)
        #shape output_array: (batch_size,max_pad,vocab_size)
        # NOTE(review): attention_weights returned here is from the LAST
        # timestep only, not all timesteps.
        return self.output_array, attention_weights
def create_model():
    """
    Build the attention encoder-decoder model, load the trained weights and
    the fitted tokenizer from Drive.

    Returns (model, tokenizer, [raw image2 features, projected image2 features]).
    """
    # Hyperparameters (must match the values used at training time).
    input_size = (224, 224)
    tokenizer = joblib.load('/content/drive/MyDrive/Medical image Reporting/tokenizer.pkl')
    max_pad = 29
    batch_size = 100
    vocab_size = len(tokenizer.word_index)
    embedding_dim = 300
    dense_dim = 512
    lstm_units = dense_dim
    dropout_rate = 0.2

    tf.keras.backend.clear_session()
    image1 = Input(shape=input_size + (3,))  # (224, 224, 3)
    image2 = Input(shape=input_size + (3,))
    caption = Input(shape=(max_pad,))

    # Encoder output shape: (None, 18, dense_dim).
    encoder_output, _stage1, _stage2 = encoder(image1, image2, dense_dim, dropout_rate)
    output, _ = decoder(max_pad, embedding_dim, dense_dim, batch_size, vocab_size)(encoder_output, caption)
    model = tf.keras.Model(inputs=[image1, image2, caption], outputs=output)

    weights_path = os.path.join('/content/drive/My Drive/Medical image Reporting',
                                'Encoder_Decoder_global_attention.h5')
    model.load_weights(weights_path)
    return model, tokenizer, [_stage1, _stage2]
def greedy_search_predict(image1, image2, model, tokenizer, input_size = (224,224)):
    """
    Greedily decode the impression for a pair of x-ray images.

    Parameters
    ----------
    image1, image2 : ndarray
        Raw images of shape (H, W, 3), already scaled by the caller.
    model, tokenizer : trained caption model and its fitted tokenizer.
    input_size : spatial size the encoder expects.

    Returns
    -------
    (caption string,
     [encoded image1, projected image1, per-step attention weights])

    FIXES vs. original: loop indentation reconstructed (lost in notebook
    export), unused `pred` list and unused `decoder_c` removed, stray
    semicolon after `break` removed.
    """
    print(image1.shape, 'initial image')
    # Resize and add a batch dimension.
    image1 = tf.expand_dims(cv2.resize(image1, input_size, interpolation=cv2.INTER_NEAREST), axis=0)
    image2 = tf.expand_dims(cv2.resize(image2, input_size, interpolation=cv2.INTER_NEAREST), axis=0)
    print(image1.shape)
    # Re-run the encoder sub-layers by name so intermediates can be returned.
    image1 = model.get_layer('image_encoder')(image1)
    print(image1.shape)
    image2 = model.get_layer('image_encoder')(image2)
    _image1 = model.get_layer('bkdense')(image1)
    print(_image1.shape)
    _image2 = model.get_layer('bkdense')(image2)
    concat = model.get_layer('concatenate')([_image1, _image2])
    enc_op = model.get_layer('encoder_batch_norm')(concat)
    enc_op = model.get_layer('encoder_dropout')(enc_op)  # final encoder output
    print(enc_op.shape, 'encoder output')

    decoder_h = tf.zeros_like(enc_op[:, 0])  # initial GRU hidden state
    a = []  # generated token ids
    attention_weights_list = []
    max_pad = 29
    for i in range(max_pad):
        if i == 0:  # seed the decoder with the <cls> start token
            caption = np.array(tokenizer.texts_to_sequences(['<cls>']))  # shape (1, 1)
        output, decoder_h, attention_weights = model.get_layer('decoder').onestepdecoder(caption, enc_op, decoder_h)
        attention_weights_list.append(attention_weights)
        max_prob = tf.argmax(output, axis=-1)  # greedy pick
        caption = np.array([max_prob])  # next step's input token
        if max_prob == np.squeeze(tokenizer.texts_to_sequences(['<end>'])):
            break  # stop token reached
        else:
            a.append(tf.squeeze(max_prob).numpy())
    return tokenizer.sequences_to_texts([a])[0], [image1, _image1, attention_weights_list]
def predict1(image1, image2=None, model_tokenizer=None):
    """
    Given image1 and (optionally) image2 filepaths, return the predicted
    caption plus the intermediate stages from greedy decoding.

    model_tokenizer, if given, is a (model, tokenizer, ...) sequence so the
    model is not rebuilt on every call.

    FIX: None comparisons use `is None` (identity) instead of `== None`.
    """
    if image2 is None:  # single-view study: reuse the same image for both inputs
        image2 = image1
    try:
        # cv2.imread returns None (it does not raise) for unreadable paths;
        # the division then raises TypeError, which lands in the except below.
        image1 = cv2.imread(image1)/255
        image2 = cv2.imread(image2)/255
    except Exception as e:
        print(e)
        return print("Must be an image")
    if model_tokenizer is None:
        model, tokenizer, stage_list = create_model()
    else:
        model, tokenizer = model_tokenizer[0], model_tokenizer[1]
    predicted_caption, stage_list = greedy_search_predict(image1, image2, model, tokenizer)
    return predicted_caption, stage_list
def function1(image1, image2):
    """
    Predict captions for parallel lists of image filepaths.

    Returns (list of predicted captions, intermediate stages of the last
    prediction).

    FIX: `stages` is initialized so empty input lists return ([], None)
    instead of raising NameError at the return statement.
    """
    model_tokenizer = list(create_model())  # build once, reuse for every pair
    predicted_caption = []
    stages = None
    for i1, i2 in zip(image1, image2):
        caption, stages = predict1(i1, i2, model_tokenizer)
        predicted_caption.append(caption)
    return predicted_caption, stages
# Pick a random labelled image from the dataset and display it.
df = pd.read_csv(labels_file)
# FIX: random.randint(0, len(df)) is inclusive at BOTH ends, so it could
# return len(df) and make iloc raise IndexError; randrange is half-open.
index = random.randrange(len(df))
name = df['Image Index'].iloc[index]
true_label = df['Finding Labels'].iloc[index]
print(name, true_label)
fig = plt.figure(figsize=(8,8))
plt.imshow(cv2.imread(image_folder+name))
plt.axis('off')
plt.tight_layout()
00000832_009.png Infiltration
# FIX: removed dead assignment `k = [7, 300]` (never read anywhere).
# The same frontal image is passed as both views of the study.
image1 = [image_folder+name]
result, stages = function1(image1, image1)
(None, 9, 1024) image1 after image encoder (None, 9, 512) image encoder output from bk_dense (None, 18, 512) encoder output (1024, 1024, 3) initial image (1, 224, 224, 3) (1, 9, 1024) (1, 9, 512) (1, 18, 512) encoder output
# Visualize the raw (stages[0]) and projected (stages[1]) backbone features.
for stage_idx in (0, 1):
    fig = plt.figure(figsize=(32, 32))
    plt.imshow(stages[stage_idx][0], cmap='gray')
    plt.axis('off')
    plt.tight_layout()

# Assemble the per-timestep attention maps into one 2-D image:
# column x holds the attention weights of decoding step x.
n_positions = stages[2][0].shape[1]
attention_img = np.zeros((n_positions, n_positions))
for step in range(len(stages[2])):
    attention_img[:, step] = np.array(stages[2][step][0]).reshape(-1)

fig = plt.figure(figsize=(12, 12))
plt.imshow(attention_img, cmap="gray")
plt.axis('off')
plt.tight_layout()
# Final presentation: show the input x-ray alongside the generated report.
from IPython.display import display, Markdown, Latex
fig = plt.figure(figsize=(8,8))
plt.imshow(cv2.imread(image_folder+name))
plt.axis('off')
plt.tight_layout()
# Render the first (and only) predicted caption as a Markdown heading.
display(Markdown(f"## {result[0]}"))