"""Patch generation and cascaded two-path CNN models for BRATS brain-tumour
segmentation, plus the slice-wise training script for the InputCascadeCNN."""

import os
from datetime import datetime

import numpy as np
import SimpleITK as sitk
from sklearn.utils import class_weight


def model_gen(input_dim, x, y, slice_no):
    """Build paired training patches for the cascade models from one slice.

    For every interior pixel it extracts a large (input_dim x input_dim)
    context patch (X1) and a fixed 33x33 patch (X2) centred on the same
    pixel (the second, smaller input of the cascade architectures), plus
    the label of the centre pixel (Y).

    Parameters
    ----------
    input_dim : int
        Odd side length of the large context patch (65 / 56 / 53).
    x : ndarray
        One 4-channel slice, indexed x[row, col, channel].
    y : ndarray
        Label volume; the centre label is read as y[i, slice_no, j].
    slice_no : int
        Index of this slice inside the label volume.

    Returns
    -------
    list
        [X1, X2, Y] as numpy arrays.
    """
    half = int(input_dim / 2)
    X1, X2, Y = [], [], []
    for i in range(half, y.shape[0] - half):
        for j in range(half, y.shape[2] - half):
            # Filter out all-zero (background-only) 33x33 patches.
            # BUG FIX: the original wrote `patch.any != 0`, comparing the
            # bound method object itself to 0 — always True, so nothing was
            # ever filtered.  The method must be *called*.
            if x[i - 16:i + 17, j - 16:j + 17, :].any():
                X2.append(x[i - 16:i + 17, j - 16:j + 17, :])
                X1.append(x[i - half:i + half + 1, j - half:j + half + 1, :])
                Y.append(y[i, slice_no, j])
    return [np.asarray(X1), np.asarray(X2), np.asarray(Y)]


def data_gen(data, y, slice_no, model_no):
    """Generate training data for one slice and one model variant.

    model_no 0 uses plain 33x33 patches; 1/2/3 delegate to model_gen with a
    65/56/53 context patch respectively.  Returns [] for all-zero slices or
    slices without any tumour label.
    """
    d = []
    x = data[slice_no]
    # Skip all-zero slices and non-tumour volumes.
    if x.any() and y.any():
        if model_no == 0:
            X1, Y1 = [], []
            # NOTE(review): the 16..159 / 16..199 bounds assume a fixed
            # slice geometry — confirm against the input volumes.
            for i in range(16, 159):
                for j in range(16, 199):
                    # BUG FIX: original tested `patch.all != 0` (a bound
                    # method, always truthy), so no patch was ever skipped;
                    # it also ran the identical double loop twice, once for
                    # X1 and once for Y1.  A single pass keeps patch/label
                    # pairs aligned by construction.
                    # NOTE(review): `.all()` keeps only patches with no
                    # zero voxel at all; if the intent was "drop all-zero
                    # patches" (as in model_gen) this should be `.any()`.
                    if x[i - 16:i + 17, j - 16:j + 17, :].all():
                        X1.append(x[i - 16:i + 17, j - 16:j + 17, :])
                        Y1.append(y[i, slice_no, j])
            d = [np.asarray(X1), np.asarray(Y1)]
        elif model_no == 1:
            d = model_gen(65, x, y, slice_no)
        elif model_no == 2:
            d = model_gen(56, x, y, slice_no)
        elif model_no == 3:
            d = model_gen(53, x, y, slice_no)
    return d


"""Model Definitions"""

import keras
from keras import layers, regularizers
from keras.layers import (Input, Dense, Activation, ZeroPadding2D,
                          BatchNormalization, Flatten, Conv2D, Lambda,
                          Concatenate, AveragePooling2D, MaxPooling2D,
                          Dropout, GlobalMaxPooling2D,
                          GlobalAveragePooling2D, Add)
from keras.models import Model
from keras.preprocessing import image
from keras.utils import layer_utils, plot_model
from keras.utils.data_utils import get_file
from keras.applications.imagenet_utils import preprocess_input
from keras.initializers import glorot_normal
from keras.callbacks import ModelCheckpoint
from IPython.display import SVG
from keras.utils.vis_utils import model_to_dot


def two_path(X_input):
    """Two-path feature extractor: a local path of small convolutions and a
    global path of 13x13 convolutions, each using maxout over two parallel
    conv+BN branches, concatenated at the end.  Returns the merged tensor
    (no classification layer)."""
    # Local path, Conv1 — maxout over two parallel conv+BN branches.
    X = Conv2D(64, (7, 7), strides=(1, 1), padding='valid')(X_input)
    X = BatchNormalization()(X)
    X1 = Conv2D(64, (7, 7), strides=(1, 1), padding='valid')(X_input)
    X1 = BatchNormalization()(X1)
    X = layers.Maximum()([X, X1])
    X = Conv2D(64, (4, 4), strides=(1, 1), padding='valid',
               activation='relu')(X)

    # Global path — wide 13x13 receptive field with maxout.
    X2 = Conv2D(160, (13, 13), strides=(1, 1), padding='valid')(X_input)
    X2 = BatchNormalization()(X2)
    X21 = Conv2D(160, (13, 13), strides=(1, 1), padding='valid')(X_input)
    X21 = BatchNormalization()(X21)
    X2 = layers.Maximum()([X2, X21])

    # Local path, Conv2 — second maxout stage.
    X3 = Conv2D(64, (3, 3), strides=(1, 1), padding='valid')(X)
    X3 = BatchNormalization()(X3)
    X31 = Conv2D(64, (3, 3), strides=(1, 1), padding='valid')(X)
    X31 = BatchNormalization()(X31)
    X = layers.Maximum()([X3, X31])
    X = Conv2D(64, (2, 2), strides=(1, 1), padding='valid',
               activation='relu')(X)

    # Merge the global and local paths.
    return Concatenate()([X2, X])


def input_cascade(input_shape1, input_shape2):
    """InputCascadeCNN: the (softmax-free) output of a first two-path net on
    the large patch is concatenated onto the *input* of a second two-path
    net on the small patch, which ends in a fully convolutional 5-class
    softmax.  Returns the compiled-ready Model with two inputs."""
    X1_input = Input(input_shape1)
    # 1st two-path stage of the cascade.
    X1 = two_path(X1_input)
    X1 = Conv2D(5, (21, 21), strides=(1, 1), padding='valid',
                activation='relu')(X1)
    X1 = BatchNormalization()(X1)

    X2_input = Input(input_shape2)
    # Concatenate the 1st stage's output with the 2nd stage's input.
    X2_input1 = Concatenate()([X1, X2_input])
    X2 = two_path(X2_input1)

    # Fully convolutional softmax classification over 5 classes.
    X2 = Conv2D(5, (21, 21), strides=(1, 1), padding='valid')(X2)
    X2 = BatchNormalization()(X2)
    X2 = Activation('softmax')(X2)
    return Model(inputs=[X1_input, X2_input], outputs=X2)


def MFCcascade(input_shape1, input_shape2):
    """MFCascadeCNN: two independent two-path nets whose feature maps are
    concatenated just before the final fully convolutional softmax."""
    # 1st two-path stage.
    X1_input = Input(input_shape1)
    X1 = two_path(X1_input)
    X1 = Conv2D(5, (21, 21), strides=(1, 1), padding='valid',
                activation='relu')(X1)
    X1 = BatchNormalization()(X1)

    # 2nd two-path stage.
    X2_input = Input(input_shape2)
    X2 = two_path(X2_input)

    # Concatenate both stages before classification.
    X2 = Concatenate()([X1, X2])
    X2 = Conv2D(5, (21, 21), strides=(1, 1), padding='valid',
                activation='relu')(X2)
    X2 = BatchNormalization()(X2)
    X2 = Activation('softmax')(X2)
    return Model(inputs=[X1_input, X2_input], outputs=X2)


def two_pathcnn(input_shape):
    """Stand-alone TwoPathCNN: the two-path extractor followed directly by a
    fully convolutional 5-class softmax (single-input baseline model)."""
    X_input = Input(input_shape)
    # Same two-path topology as two_path(), built inline so this model is
    # self-contained.
    X = Conv2D(64, (7, 7), strides=(1, 1), padding='valid')(X_input)
    X = BatchNormalization()(X)
    X1 = Conv2D(64, (7, 7), strides=(1, 1), padding='valid')(X_input)
    X1 = BatchNormalization()(X1)
    X = layers.Maximum()([X, X1])
    X = Conv2D(64, (4, 4), strides=(1, 1), padding='valid',
               activation='relu')(X)

    X2 = Conv2D(160, (13, 13), strides=(1, 1), padding='valid')(X_input)
    X2 = BatchNormalization()(X2)
    X21 = Conv2D(160, (13, 13), strides=(1, 1), padding='valid')(X_input)
    X21 = BatchNormalization()(X21)
    X2 = layers.Maximum()([X2, X21])

    X3 = Conv2D(64, (3, 3), strides=(1, 1), padding='valid')(X)
    X3 = BatchNormalization()(X3)
    X31 = Conv2D(64, (3, 3), strides=(1, 1), padding='valid')(X)
    X31 = BatchNormalization()(X31)
    X = layers.Maximum()([X3, X31])
    X = Conv2D(64, (2, 2), strides=(1, 1), padding='valid',
               activation='relu')(X)

    X = Concatenate()([X2, X])
    X = Conv2D(5, (21, 21), strides=(1, 1), padding='valid')(X)
    X = Activation('softmax')(X)
    return Model(inputs=X_input, outputs=X)


"""Training for the InputCascadeCNN model"""

m1 = input_cascade((65, 65, 4), (33, 33, 4))

DATA_ROOT = "/home/u32810/Dataset/BRATS-2/Image_Data/HG/"
fold = os.listdir(DATA_ROOT)
fold.sort(key=str.lower)
optimizer = keras.optimizers.Adagrad(lr=0.08)

# Compile once, up front — the original recompiled inside the per-slice
# loop, which is pure loop-invariant overhead.
m1.compile(optimizer=optimizer, loss='categorical_crossentropy',
           metrics=['accuracy'])
filepath = "~/Checkpoint/weights-improvement-best.hdf5"
# BUG FIX: the original monitored 'val_accuracy' with save_best_only=True,
# but fit() is called without validation data, so the checkpoint could
# never trigger; monitor the training accuracy instead.
checkpoint = ModelCheckpoint(filepath, monitor='accuracy', verbose=1,
                             save_best_only=True, mode='max')
callbacks_list = [checkpoint]

for path in fold:
    print(path)
    path = DATA_ROOT + path
    p = os.listdir(path)
    # NOTE(review): p is not sorted, yet index 4 is assumed to be the label
    # folder below — confirm the directory layout guarantees this ordering.
    #p.sort(key=str.lower)
    arr = []
    Y_labels = None
    # Read the 4 modality images (channels) and the label volume.
    for i in range(len(p)):
        p1 = os.listdir(path + '/' + p[i])
        if i != 4:
            p1.sort()
            img = sitk.ReadImage(path + '/' + p[i] + '/' + p1[-1])
            arr.append(sitk.GetArrayFromImage(img))
        else:
            img = sitk.ReadImage(path + '/' + p[i] + '/' + p1[0])
            Y_labels = sitk.GetArrayFromImage(img)

    # Re-assemble slice-wise as (slices, H, W, 4), slicing the volumes
    # along axis 1.
    data = np.zeros((Y_labels.shape[1], Y_labels.shape[0],
                     Y_labels.shape[2], 4))
    for i in range(Y_labels.shape[1]):
        for c in range(4):
            data[i, :, :, c] = arr[c][:, i, :]
    print(data.shape)

    info = []
    # Create patches for each slice and train slice-wise.
    for i in range(data.shape[0]):
        d = data_gen(data, Y_labels, i, 1)
        if len(d) != 0:
            # One-hot encode the centre-pixel labels as (N, 1, 1, 5) to
            # match the fully convolutional softmax output shape.
            y_onehot = np.zeros((d[2].shape[0], 1, 1, 5))
            for j in range(y_onehot.shape[0]):
                y_onehot[j, :, :, d[2][j]] = 1
            X1 = d[0]
            X2 = d[1]
            # Keyword arguments: required by current sklearn and accepted
            # by older versions alike.
            # NOTE(review): Keras documents class_weight as a dict; an
            # ndarray is passed here as in the original — verify the
            # installed Keras version accepts it.
            class_weights = class_weight.compute_class_weight(
                class_weight='balanced', classes=np.unique(d[2]), y=d[2])
            print('slice no:' + str(i))
            info.append(m1.fit([X1, X2], y_onehot, epochs=5, batch_size=128,
                               class_weight=class_weights,
                               callbacks=callbacks_list))

# BUG FIX: the original filename f'CascasdeMulEpoch.h5{datetime.now()}'
# appended the timestamp *after* the extension (with spaces and colons in
# the name); use a filesystem-safe timestamp before '.h5'.
m1.save(f'CascadeMulEpoch_{datetime.now():%Y%m%d_%H%M%S}.h5')