# inputs_processing/generate_images.py
import numpy as np
from tensorflow import keras


class DataGenerator(keras.utils.Sequence):
    """Keras Sequence yielding batches of paired image tensors as ([X1, X2], y).

    Images are loaded from .npy files whose names come from the ``x1_col`` /
    ``x2_col`` columns of ``df``; scalar labels come from ``y_col``. When
    ``onera`` is False, only channels 1:4 of each loaded array are kept
    (assumes the files hold >= 4 channels — TODO confirm input file layout);
    otherwise the full array is used and is assumed to already be 96x96x3.
    When ``norm`` is True, each batch is standardised to mean 0 / std 1.
    """

    def __init__(self, path, df, x1_col, x2_col, y_col, batch_size=32,
                 num_classes=None, shuffle=False, onera=False, norm=False):
        self.path = path
        self.batch_size = batch_size
        self.df = df
        self.indices = self.df.index.tolist()
        # NOTE(review): num_classes is accepted but unused here — labels are
        # returned as-is (one-hot encoding only happens in CD_DataGenerator).
        self.num_classes = num_classes
        self.shuffle = shuffle
        self.x1_col = x1_col
        self.x2_col = x2_col
        self.y_col = y_col
        self.onera = onera
        self.norm = norm
        self.on_epoch_end()

    def __len__(self):
        # Number of *full* batches; any trailing partial batch is dropped.
        return int(np.floor(len(self.indices) / self.batch_size))

    def __getitem__(self, index):
        # Map the batch number to a slice of the (possibly shuffled) order,
        # then to the DataFrame index labels for that batch. Kept in a
        # separate local instead of rebinding the `index` parameter.
        positions = self.index[index * self.batch_size:(index + 1) * self.batch_size]
        batch = [self.indices[k] for k in positions]
        X1, X2, y = self.get_data(batch)
        return [X1, X2], y

    def on_epoch_end(self):
        # Rebuild (and optionally reshuffle) the iteration order each epoch.
        self.index = np.arange(len(self.indices))
        if self.shuffle:
            np.random.shuffle(self.index)

    def get_data(self, batch):
        """Load one batch of image pairs and scalar labels from disk.

        Returns (X1, X2, y) with X1/X2 of shape (batch_size, 96, 96, 3)
        and y of shape (batch_size,).
        """
        # np.empty instead of the low-level np.ndarray constructor: same
        # uninitialised allocation, documented API; every slot is filled below.
        X1 = np.empty((self.batch_size, 96, 96, 3))
        X2 = np.empty((self.batch_size, 96, 96, 3))
        y = np.empty((self.batch_size,))

        # `idx` is the DataFrame index label (was `id`, shadowing the builtin).
        for i, idx in enumerate(batch):
            x1_temp = np.load(self.path + self.x1_col[idx])
            x2_temp = np.load(self.path + self.x2_col[idx])
            if not self.onera:
                # Keep channels 1-3 only (drops the leading band).
                X1[i] = x1_temp[:, :, 1:4]
                X2[i] = x2_temp[:, :, 1:4]
            else:
                X1[i] = x1_temp
                X2[i] = x2_temp
            y[i] = self.y_col[idx]

        if self.norm:
            # Per-batch standardisation over the whole tensor.
            X1 = (X1 - X1.mean()) / X1.std()
            X2 = (X2 - X2.mean()) / X2.std()
        return X1, X2, y


class CD_DataGenerator(keras.utils.Sequence):
    """Change-detection variant of DataGenerator.

    Differences from DataGenerator: the label for each sample is itself a
    96x96 mask loaded from a .npy file named in ``y_col``, and the returned
    labels are one-hot encoded with ``keras.utils.to_categorical`` using
    ``num_classes``.
    """

    def __init__(self, path, df, x1_col, x2_col, y_col, batch_size=32,
                 num_classes=None, shuffle=False, onera=False, norm=False):
        self.path = path
        self.batch_size = batch_size
        self.df = df
        self.indices = self.df.index.tolist()
        self.num_classes = num_classes
        self.shuffle = shuffle
        self.x1_col = x1_col
        self.x2_col = x2_col
        self.y_col = y_col
        self.onera = onera
        self.norm = norm
        self.on_epoch_end()

    def __len__(self):
        # Number of *full* batches; any trailing partial batch is dropped.
        return int(np.floor(len(self.indices) / self.batch_size))

    def __getitem__(self, index):
        # Batch number -> positions in the shuffled order -> index labels.
        positions = self.index[index * self.batch_size:(index + 1) * self.batch_size]
        batch = [self.indices[k] for k in positions]
        X1, X2, y = self.get_data(batch)
        return [X1, X2], y

    def on_epoch_end(self):
        # Rebuild (and optionally reshuffle) the iteration order each epoch.
        self.index = np.arange(len(self.indices))
        if self.shuffle:
            np.random.shuffle(self.index)

    def get_data(self, batch):
        """Load one batch of image pairs and mask labels from disk.

        Returns (X1, X2, y_onehot) with X1/X2 of shape
        (batch_size, 96, 96, 3) and y one-hot encoded over ``num_classes``.
        """
        X1 = np.empty((self.batch_size, 96, 96, 3))
        X2 = np.empty((self.batch_size, 96, 96, 3))
        y = np.empty((self.batch_size, 96, 96))

        for i, idx in enumerate(batch):
            x1_temp = np.load(self.path + self.x1_col[idx])
            x2_temp = np.load(self.path + self.x2_col[idx])
            y_temp = np.load(self.path + self.y_col[idx])

            if not self.onera:
                # Keep channels 1-3 only (drops the leading band).
                X1[i] = x1_temp[:, :, 1:4]
                X2[i] = x2_temp[:, :, 1:4]
            else:
                X1[i] = x1_temp
                X2[i] = x2_temp
            y[i] = y_temp

        if self.norm:
            # Per-batch standardisation over the whole tensor.
            X1 = (X1 - X1.mean()) / X1.std()
            X2 = (X2 - X2.mean()) / X2.std()
        return X1, X2, keras.utils.to_categorical(y, num_classes=self.num_classes)