diff --git a/skluc/data/__init__.py b/skluc/examples/data/__init__.py
similarity index 100%
rename from skluc/data/__init__.py
rename to skluc/examples/data/__init__.py
diff --git a/skluc/examples/tasks/classification/omniglot/pretrained/__init__.py b/skluc/examples/tasks/classification/omniglot/pretrained/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_lenet.py b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_lenet.py
new file mode 100644
index 0000000000000000000000000000000000000000..80435e8b597d989dd1886f17d07ac73e7eb451c9
--- /dev/null
+++ b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_lenet.py
@@ -0,0 +1,62 @@
+import time
+
+import numpy as np
+from keras import Sequential, optimizers
+from keras.initializers import he_normal
+from keras.layers import Dense, Activation, BatchNormalization
+
+from skluc.main.data.mldatasets import OmniglotDataset
+from skluc.main.data.transformation.LeCunTransformer import LecunTransformer
+from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
+
+if __name__ == "__main__":
+    input_shape = (28, 28, 1)
+    validation_size = 1000
+    n_epoch = 200
+    batch_size = 128
+    weight_decay = 0.0001
+    num_classes = 964
+
+    data = OmniglotDataset(validation_size=validation_size, seed=0)
+    data.load()
+    data.normalize()
+    data.to_one_hot()
+    data.data_astype(np.float32)
+    data.labels_astype(np.float32)
+    data.to_image()
+    resize_trans = ResizeTransformer(data.s_name, output_shape=input_shape[:-1])
+    data.apply_transformer(resize_trans)
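+    # Replace raw pixels with features from a LeNet pretrained on Omniglot, cut at its "activation_2" layer.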
+    lenet_transformer = LecunTransformer("omniglot_28x28", cut_layer_name="activation_2")
+    data.apply_transformer(lenet_transformer)
+    data.normalize()
+    data.flatten()
+
+    model = Sequential()
+
+    model.add(Dense(120, kernel_initializer=he_normal(), input_shape=data.train.data[0].shape))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(84, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(num_classes, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('softmax'))
+    sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
+    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
+
+    print('Training a dense classifier on the pretrained features (no data augmentation).')
+    x_train, y_train = data.train
+    x_test, y_test = data.test
+    x_val, y_val = data.validation
+
+    model.fit(x_train, y_train,
+              batch_size=batch_size,
+              epochs=n_epoch,
+              validation_data=(x_val, y_val))
+
+    model.save('{}_pretrained_lenet_omniglot_28x28.h5'.format(time.time()))
+    print("Final evaluation on val set: {}".format(model.evaluate(x_val, y_val)))
diff --git a/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_siamese_lenet.py b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_siamese_lenet.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f2ce32053cd74c703a537603d43f4aae60dc8d0
--- /dev/null
+++ b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_siamese_lenet.py
@@ -0,0 +1,61 @@
+import time
+
+import numpy as np
+from keras import Sequential, optimizers
+from keras.initializers import he_normal
+from keras.layers import Dense, Activation, BatchNormalization
+
+from skluc.main.data.mldatasets import OmniglotDataset
+from skluc.main.data.transformation.LeCunTransformer import LecunTransformer
+from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
+
+if __name__ == "__main__":
+    input_shape = (28, 28, 1)
+    validation_size = 1000
+    n_epoch = 200
+    batch_size = 128
+    weight_decay = 0.0001
+    num_classes = 964
+
+    data = OmniglotDataset(validation_size=validation_size, seed=0)
+    data.load()
+    data.normalize()
+    data.to_one_hot()
+    data.data_astype(np.float32)
+    data.labels_astype(np.float32)
+    data.to_image()
+    resize_trans = ResizeTransformer(data.s_name, output_shape=input_shape[:-1])
+    data.apply_transformer(resize_trans)
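+    # Features come from the conv part of a siamese-trained LeNet; cut_layer_index=-4 cuts the loaded model four layers from its end.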
+    siamese_lenet_transformer = LecunTransformer("siamese_omniglot_28x28", cut_layer_index=-4)
+    data.apply_transformer(siamese_lenet_transformer)
+    data.normalize()
+
+    model = Sequential()
+
+    model.add(Dense(120, kernel_initializer=he_normal(), input_shape=data.train.data[0].shape))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(84, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(num_classes, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('softmax'))
+    sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
+    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
+
+    print('Training a dense classifier on the pretrained features (no data augmentation).')
+    x_train, y_train = data.train
+    x_test, y_test = data.test
+    x_val, y_val = data.validation
+
+    model.fit(x_train, y_train,
+              batch_size=batch_size,
+              epochs=n_epoch,
+              validation_data=(x_val, y_val))
+
+    model.save('{}_pretrained_siamese_lenet_omniglot_28x28.h5'.format(time.time()))
+    print("Final evaluation on val set: {}".format(model.evaluate(x_val, y_val)))
diff --git a/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_siamese_vgg19_less_pool.py b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_siamese_vgg19_less_pool.py
new file mode 100644
index 0000000000000000000000000000000000000000..0cde78954fb2b3ccacb36a7b3cf75041509c74df
--- /dev/null
+++ b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_siamese_vgg19_less_pool.py
@@ -0,0 +1,69 @@
+import time
+
+import keras
+import numpy as np
+from keras import Sequential, optimizers
+from keras.initializers import he_normal
+from keras.layers import Dense, Activation, BatchNormalization
+
+from skluc.main.data.mldatasets import OmniglotDataset
+from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
+from skluc.main.data.transformation.VGG19Transformer import VGG19Transformer
+
+if __name__ == "__main__":
+    input_shape = (28, 28, 1)
+    validation_size = 1000
+    n_epoch = 200
+    batch_size = 128
+    weight_decay = 0.0001
+    num_classes = 964
+
+    data = OmniglotDataset(validation_size=validation_size, seed=0)
+    data.load()
+    data.normalize()
+    data.to_one_hot()
+    data.data_astype(np.float32)
+    data.labels_astype(np.float32)
+    data.to_image()
+    resize_trans = ResizeTransformer(data.s_name, output_shape=input_shape[:-1])
+    data.apply_transformer(resize_trans)
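+    # Extract features with the conv part of a siamese-trained VGG19 ("less pool" variant), cut at its "activation_16" layer.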
+    siamese_vgg19_transformer = VGG19Transformer("siamese_omniglot_28x28", cut_layer_name="activation_16")
+    data.apply_transformer(siamese_vgg19_transformer)
+    data.normalize()
+    data.flatten()
+
+    model = Sequential()
+
+    # Block 1
+    model.add(Dense(512, use_bias=True, kernel_regularizer=keras.regularizers.l2(weight_decay),
+                    kernel_initializer=he_normal(), input_shape=data.train.data[0].shape))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(
+        Dense(512, kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='fc2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(num_classes, kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(),
+                    name='predictions'))
+
+    model.add(BatchNormalization())
+    model.add(Activation('softmax'))
+
+    sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
+    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
+
+    print('Training a dense classifier on the pretrained features (no data augmentation).')
+    x_train, y_train = data.train
+    x_test, y_test = data.test
+    x_val, y_val = data.validation
+
+    model.fit(x_train, y_train,
+              batch_size=batch_size,
+              epochs=n_epoch,
+              validation_data=(x_val, y_val))
+
+    model.save('{}_pretrained_siamese_vgg19_less_pool_omniglot_28x28.h5'.format(time.time()))
+    print("Final evaluation on val set: {}".format(model.evaluate(x_val, y_val)))
diff --git a/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_siamese_vinyals.py b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_siamese_vinyals.py
new file mode 100644
index 0000000000000000000000000000000000000000..752a2a2625c0993d9f0b95e7a1de113e4d70b117
--- /dev/null
+++ b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_siamese_vinyals.py
@@ -0,0 +1,61 @@
+import time
+
+import numpy as np
+from keras import Sequential, optimizers
+from keras.initializers import he_normal
+from keras.layers import Dense, Activation, BatchNormalization
+
+from skluc.main.data.mldatasets import OmniglotDataset
+from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
+from skluc.main.data.transformation.VinyalsTransformer import VinyalsTransformer
+
+if __name__ == "__main__":
+    input_shape = (28, 28, 1)
+    validation_size = 1000
+    n_epoch = 200
+    batch_size = 128
+    weight_decay = 0.0001
+    num_classes = 964
+
+    data = OmniglotDataset(validation_size=validation_size, seed=0)
+    data.load()
+    data.normalize()
+    data.to_one_hot()
+    data.data_astype(np.float32)
+    data.labels_astype(np.float32)
+    data.to_image()
+    resize_trans = ResizeTransformer(data.s_name, output_shape=input_shape[:-1])
+    data.apply_transformer(resize_trans)
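+    # Same recipe as the siamese LeNet script: the siamese-trained Vinyals conv net is cut four layers from its end.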
+    siamese_vinyals_transformer = VinyalsTransformer("siamese_omniglot_28x28", cut_layer_index=-4)
+    data.apply_transformer(siamese_vinyals_transformer)
+    data.normalize()
+
+    model = Sequential()
+
+    model.add(Dense(120, kernel_initializer=he_normal(), input_shape=data.train.data[0].shape))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(84, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(num_classes, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('softmax'))
+    sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
+    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
+
+    print('Training a dense classifier on the pretrained features (no data augmentation).')
+    x_train, y_train = data.train
+    x_test, y_test = data.test
+    x_val, y_val = data.validation
+
+    model.fit(x_train, y_train,
+              batch_size=batch_size,
+              epochs=n_epoch,
+              validation_data=(x_val, y_val))
+
+    model.save('{}_pretrained_siamese_vinyals_omniglot_28x28.h5'.format(time.time()))
+    print("Final evaluation on val set: {}".format(model.evaluate(x_val, y_val)))
diff --git a/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_vgg19_less_pool.py b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_vgg19_less_pool.py
new file mode 100644
index 0000000000000000000000000000000000000000..2346c6cef78f55a73e11d41b9d0a0f8eea7ff597
--- /dev/null
+++ b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_vgg19_less_pool.py
@@ -0,0 +1,69 @@
+import time
+
+import keras
+import numpy as np
+from keras import Sequential, optimizers
+from keras.initializers import he_normal
+from keras.layers import Dense, Activation, BatchNormalization
+
+from skluc.main.data.mldatasets import OmniglotDataset
+from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
+from skluc.main.data.transformation.VGG19Transformer import VGG19Transformer
+
+if __name__ == "__main__":
+    input_shape = (28, 28, 1)
+    validation_size = 1000
+    n_epoch = 200
+    batch_size = 128
+    weight_decay = 0.0001
+    num_classes = 964
+
+    data = OmniglotDataset(validation_size=validation_size, seed=0)
+    data.load()
+    data.normalize()
+    data.to_one_hot()
+    data.data_astype(np.float32)
+    data.labels_astype(np.float32)
+    data.to_image()
+    resize_trans = ResizeTransformer(data.s_name, output_shape=input_shape[:-1])
+    data.apply_transformer(resize_trans)
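+    # Here the features come from a VGG19 trained directly on Omniglot classification (weights "omniglot_28x28"), not the siamese one.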
+    siamese_vgg19_transformer = VGG19Transformer("omniglot_28x28", cut_layer_name="activation_16")
+    data.apply_transformer(siamese_vgg19_transformer)
+    data.normalize()
+    data.flatten()
+
+    model = Sequential()
+
+    # Block 1
+    model.add(Dense(512, use_bias=True, kernel_regularizer=keras.regularizers.l2(weight_decay),
+                    kernel_initializer=he_normal(), input_shape=data.train.data[0].shape))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(
+        Dense(512, kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='fc2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(num_classes, kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(),
+                    name='predictions'))
+
+    model.add(BatchNormalization())
+    model.add(Activation('softmax'))
+
+    sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
+    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
+
+    print('Training a dense classifier on the pretrained features (no data augmentation).')
+    x_train, y_train = data.train
+    x_test, y_test = data.test
+    x_val, y_val = data.validation
+
+    model.fit(x_train, y_train,
+              batch_size=batch_size,
+              epochs=n_epoch,
+              validation_data=(x_val, y_val))
+
+    model.save('{}_pretrained_vgg19_less_pool_omniglot_28x28.h5'.format(time.time()))
+    print("Final evaluation on val set: {}".format(model.evaluate(x_val, y_val)))
diff --git a/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_vinyals.py b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_vinyals.py
new file mode 100644
index 0000000000000000000000000000000000000000..51b1f5132617bad40ed627ff82776507cd2239b3
--- /dev/null
+++ b/skluc/examples/tasks/classification/omniglot/pretrained/omniglot_28x28_pretrained_vinyals.py
@@ -0,0 +1,62 @@
+import time
+
+import numpy as np
+from keras import Sequential, optimizers
+from keras.initializers import he_normal
+from keras.layers import Dense, Activation, BatchNormalization
+
+from skluc.main.data.mldatasets import OmniglotDataset
+from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
+from skluc.main.data.transformation.VinyalsTransformer import VinyalsTransformer
+
+if __name__ == "__main__":
+    input_shape = (28, 28, 1)
+    validation_size = 1000
+    n_epoch = 200
+    batch_size = 128
+    weight_decay = 0.0001
+    num_classes = 964
+
+    data = OmniglotDataset(validation_size=validation_size, seed=0)
+    data.load()
+    data.normalize()
+    data.to_one_hot()
+    data.data_astype(np.float32)
+    data.labels_astype(np.float32)
+    data.to_image()
+    resize_trans = ResizeTransformer(data.s_name, output_shape=input_shape[:-1])
+    data.apply_transformer(resize_trans)
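+    # Features from a Vinyals-style conv net trained directly on Omniglot, cut at its "activation_4" layer.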
+    siamese_vinyals_transformer = VinyalsTransformer("omniglot_28x28", cut_layer_name="activation_4")
+    data.apply_transformer(siamese_vinyals_transformer)
+    data.normalize()
+    data.flatten()
+
+    model = Sequential()
+
+    model.add(Dense(120, kernel_initializer=he_normal(), input_shape=data.train.data[0].shape))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(84, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(num_classes, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('softmax'))
+    sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
+    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
+
+    print('Training a dense classifier on the pretrained features (no data augmentation).')
+    x_train, y_train = data.train
+    x_test, y_test = data.test
+    x_val, y_val = data.validation
+
+    model.fit(x_train, y_train,
+              batch_size=batch_size,
+              epochs=n_epoch,
+              validation_data=(x_val, y_val))
+
+    model.save('{}_pretrained_vinyals_omniglot_28x28.h5'.format(time.time()))
+    print("Final evaluation on val set: {}".format(model.evaluate(x_val, y_val)))
diff --git a/skluc/examples/tasks/classification/omniglot/scratch/__init__.py b/skluc/examples/tasks/classification/omniglot/scratch/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/skluc/examples/tasks/classification/omniglot/omniglot_28x28_simple.py b/skluc/examples/tasks/classification/omniglot/scratch/omniglot_28x28_lenet.py
similarity index 58%
rename from skluc/examples/tasks/classification/omniglot/omniglot_28x28_simple.py
rename to skluc/examples/tasks/classification/omniglot/scratch/omniglot_28x28_lenet.py
index d03d477a2137ec34c1eda771d3f6abbd8e99926d..6a4c421c074b4f1060ad44655968c38f9e48ac19 100644
--- a/skluc/examples/tasks/classification/omniglot/omniglot_28x28_simple.py
+++ b/skluc/examples/tasks/classification/omniglot/scratch/omniglot_28x28_lenet.py
@@ -2,14 +2,16 @@ import time
 
 import numpy as np
 from keras import optimizers
-from keras.callbacks import LearningRateScheduler, TensorBoard
-from keras.layers import Conv2D, MaxPooling2D
-from keras.layers import Dense, Flatten
+from keras.callbacks import LearningRateScheduler
+from keras.initializers import he_normal
+from keras.layers import Conv2D, MaxPooling2D, Activation, Dense, Flatten
+from keras.layers.normalization import BatchNormalization
 from keras.models import Sequential
 from keras.preprocessing.image import ImageDataGenerator
 
 import skluc.main.data.mldatasets as dataset
 from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
+from skluc.main.utils import logger
 
 
 def scheduler(epoch):
@@ -28,30 +30,48 @@ def scheduler(epoch):
 
 def model_definition():
     model = Sequential()
-    model.add(
-        Conv2D(6, (5, 5), padding='valid', activation='relu', kernel_initializer='he_normal', input_shape=input_shape))
+
+    model.add(Conv2D(64, (5, 5), padding='same', kernel_initializer=he_normal(), input_shape=input_shape))
+    model.add(BatchNormalization())
     model.add(MaxPooling2D((2, 2), strides=(2, 2)))
-    model.add(Conv2D(16, (5, 5), padding='valid', activation='relu', kernel_initializer='he_normal'))
+    model.add(Activation('relu'))
+
+    model.add(Conv2D(128, (5, 5), padding='same', kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
     model.add(MaxPooling2D((2, 2), strides=(2, 2)))
+    model.add(Activation('relu'))
+
+
     model.add(Flatten())
-    model.add(Dense(120, activation='relu', kernel_initializer='he_normal'))
-    model.add(Dense(84, activation='relu', kernel_initializer='he_normal'))
-    model.add(Dense(num_classes, activation='softmax', kernel_initializer='he_normal'))
+
+    model.add(Dense(120, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(84, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    model.add(Dense(num_classes, kernel_initializer=he_normal()))
+    model.add(BatchNormalization())
+    model.add(Activation('softmax'))
+
     sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
     model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
     return model
 
 
 if __name__ == "__main__":
+    logger.debug("Executing file {}".format(__file__))
     validation_size = 10000
     seed = 0
     num_classes = 964
     batch_size = 128
-    epochs = 2000
-    dropout = 0.5
+    epochs = 100
+    # dropout = 0.5
     weight_decay = 0.0001
     input_shape = (28, 28, 1)
-    log_filepath = r'./simple_retrain_logs/'
+    log_filepath = r'./lenet_logs/'
 
     data = dataset.OmniglotDataset(validation_size=1000, seed=seed)
     data.load()
@@ -60,16 +80,17 @@ if __name__ == "__main__":
     data.data_astype(np.float32)
     data.labels_astype(np.float32)
     data.to_image()
-    resizetrans = ResizeTransformer(data.s_name, (28, 28))
+    resizetrans = ResizeTransformer(data.s_name, input_shape[:-1])
     data.apply_transformer(resizetrans)
     (x_train, y_train), (x_test, y_test) = data.train, data.test
     x_val, y_val = data.validation
 
     model = model_definition()
 
-    tb_cb = TensorBoard(log_dir=log_filepath, histogram_freq=0)
+    # tb_cb = TensorBoard(log_dir=log_filepath, histogram_freq=1, write_grads=True, write_images=True)
     change_lr = LearningRateScheduler(scheduler)
-    cbks = [change_lr, tb_cb]
+    # cbks = [change_lr, tb_cb]
+    cbks = [change_lr]
 
     print('Using real-time data augmentation.')
     datagen = ImageDataGenerator(horizontal_flip=True,
@@ -82,7 +103,7 @@ if __name__ == "__main__":
                         steps_per_epoch=iterations,
                         epochs=epochs,
                         callbacks=cbks,
-                        validation_data=(x_val[:200], y_val[:200]))
+                        validation_data=(x_val, y_val))
 
-    model.save('{}_simple_omniglot.h5'.format(time.time()))
+    model.save('{}_lenet_omniglot_28x28.h5'.format(time.time()))
     print("Final evaluation on val set: {}".format(model.evaluate(x_val, y_val)))
diff --git a/skluc/examples/tasks/classification/omniglot/scratch/omniglot_28x28_vgg19_less_pool.py b/skluc/examples/tasks/classification/omniglot/scratch/omniglot_28x28_vgg19_less_pool.py
new file mode 100644
index 0000000000000000000000000000000000000000..198d2f66541aa3af0cb5a9af6d3b48449f82ce83
--- /dev/null
+++ b/skluc/examples/tasks/classification/omniglot/scratch/omniglot_28x28_vgg19_less_pool.py
@@ -0,0 +1,186 @@
+import time
+
+import keras
+import numpy as np
+from keras import optimizers
+from keras.callbacks import LearningRateScheduler, TensorBoard
+from keras.initializers import he_normal
+from keras.layers import Conv2D, MaxPooling2D
+from keras.layers import Dense, Dropout, Activation, Flatten
+from keras.layers.normalization import BatchNormalization
+from keras.models import Sequential
+from keras.preprocessing.image import ImageDataGenerator
+
+import skluc.main.data.mldatasets as dataset
+from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
+from skluc.main.utils import logger
+
+
+def scheduler(epoch):
+    """
+    Function to pass to the "LearningrateScheduler"
+
+    :param epoch:
+    :return:
+    """
+    if epoch < 80:
+        return 0.1
+    if epoch < 160:
+        return 0.01
+    return 0.001
+
+
+def model_definition():
+    model = Sequential()
+
+    # Block 1
+    model.add(Conv2D(64, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block1_conv1', input_shape=input_shape))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(64, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block1_conv2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool'))
+
+    # Block 2
+    model.add(Conv2D(128, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block2_conv1'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(128, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block2_conv2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool'))
+
+    # Block 3
+    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block3_conv1'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block3_conv2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block3_conv3'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block3_conv4'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool'))
+
+    # Block 4
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block4_conv1'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block4_conv2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block4_conv3'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block4_conv4'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool'))
+
+    # Block 5
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block5_conv1'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block5_conv2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block5_conv3'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block5_conv4'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
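+    # Unlike standard VGG19, this "less pool" variant has no block5_pool: after four 2x2 poolings a 28x28 input is already down to 1x1.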
+
+    model.add(Flatten(name='flatten'))
+
+    model.add(Dense(512, use_bias=True, kernel_regularizer=keras.regularizers.l2(weight_decay),
+                    kernel_initializer=he_normal(), name='fc_cifa10'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Dropout(dropout))
+
+    model.add(
+        Dense(512, kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='fc2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Dropout(dropout))
+
+    model.add(Dense(num_classes, kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(),
+                    name='predictions'))
+
+    model.add(BatchNormalization())
+    model.add(Activation('softmax'))
+
+    # -------- optimizer setting -------- #
+    sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
+    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
+    return model
+
+
+if __name__ == "__main__":
+    logger.debug("Executing file {}".format(__file__))
+
+    validation_size = 10000
+    seed = 0
+    num_classes = 964
+    batch_size = 128
+    epochs = 200
+    dropout = 0.5
+    weight_decay = 0.0001
+    input_shape = (28, 28, 1)
+    log_filepath = r'./vgg19_logs/'
+
+    data = dataset.OmniglotDataset(validation_size=1000, seed=seed)
+    data.load()
+    data.normalize()
+    data.to_one_hot()
+    data.data_astype(np.float32)
+    data.labels_astype(np.float32)
+    data.to_image()
+    resizetrans = ResizeTransformer(data.s_name, input_shape[:-1])
+    data.apply_transformer(resizetrans)
+    (x_train, y_train), (x_test, y_test) = data.train, data.test
+    x_val, y_val = data.validation
+
+    model = model_definition()
+
+    tb_cb = TensorBoard(log_dir=log_filepath, histogram_freq=0)
+    change_lr = LearningRateScheduler(scheduler)
+    cbks = [change_lr, tb_cb]
+
+    print('Using real-time data augmentation.')
+    datagen = ImageDataGenerator(horizontal_flip=True,
+                                 width_shift_range=0.125, height_shift_range=0.125, fill_mode='constant', cval=0.)
+
+    datagen.fit(x_train)
+
+    iterations = int(data.train.data.shape[0] / batch_size)
+    model.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size),
+                        steps_per_epoch=iterations,
+                        epochs=epochs,
+                        callbacks=cbks,
+                        validation_data=(x_val[:200], y_val[:200]))
+
+    model.save('{}_vgg19_less_pool_omniglot_28x28.h5'.format(time.time()))
+    print("Final evaluation on val set: {}".format(model.evaluate(x_val, y_val)))
diff --git a/skluc/examples/tasks/classification/omniglot/omniglot_28x28_lenet.py b/skluc/examples/tasks/classification/omniglot/scratch/omniglot_28x28_vinyals.py
similarity index 64%
rename from skluc/examples/tasks/classification/omniglot/omniglot_28x28_lenet.py
rename to skluc/examples/tasks/classification/omniglot/scratch/omniglot_28x28_vinyals.py
index 9133933cf2399da9c7ce4a173bb3a670d371ce70..d9b1ccdc0470b05cbf99749e3bf8ed0883348490 100644
--- a/skluc/examples/tasks/classification/omniglot/omniglot_28x28_lenet.py
+++ b/skluc/examples/tasks/classification/omniglot/scratch/omniglot_28x28_vinyals.py
@@ -3,13 +3,14 @@ import time
 import numpy as np
 from keras import optimizers
 from keras.callbacks import LearningRateScheduler, TensorBoard
-from keras.layers import Conv2D, MaxPooling2D
+from keras.layers import Conv2D, MaxPooling2D, BatchNormalization, Activation
 from keras.layers import Dense, Flatten
 from keras.models import Sequential
 from keras.preprocessing.image import ImageDataGenerator
 
 import skluc.main.data.mldatasets as dataset
 from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
+from skluc.main.utils import logger
 
 
 def scheduler(epoch):
@@ -28,30 +29,54 @@ def scheduler(epoch):
 
 def model_definition():
     model = Sequential()
+
+    model.add(
+        Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal', input_shape=input_shape))
+    model.add(BatchNormalization())
+    model.add(MaxPooling2D((2, 2), strides=(2, 2)))
+    model.add(Activation("relu"))
+
+    model.add(
+        Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal'))
+    model.add(BatchNormalization())
+    model.add(MaxPooling2D((2, 2), strides=(2, 2)))
+    model.add(Activation("relu"))
+
     model.add(
-        Conv2D(6, (5, 5), padding='valid', activation='relu', kernel_initializer='he_normal', input_shape=input_shape))
+        Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal'))
+    model.add(BatchNormalization())
     model.add(MaxPooling2D((2, 2), strides=(2, 2)))
-    model.add(Conv2D(16, (5, 5), padding='valid', activation='relu', kernel_initializer='he_normal'))
+    model.add(Activation("relu"))
+
+    model.add(
+        Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal'))
+    model.add(BatchNormalization())
     model.add(MaxPooling2D((2, 2), strides=(2, 2)))
+    model.add(Activation("relu"))
+
+
     model.add(Flatten())
-    model.add(Dense(120, activation='relu', kernel_initializer='he_normal'))
-    model.add(Dense(84, activation='relu', kernel_initializer='he_normal'))
-    model.add(Dense(num_classes, activation='softmax', kernel_initializer='he_normal'))
+
+    model.add(Dense(num_classes, kernel_initializer='he_normal'))
+    model.add(Activation("softmax"))
+
     sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
     model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
     return model
 
 
 if __name__ == "__main__":
+    logger.debug("Executing file {}".format(__file__))
+
     validation_size = 10000
     seed = 0
     num_classes = 964
     batch_size = 128
-    epochs = 2000
+    epochs = 200
     dropout = 0.5
     weight_decay = 0.0001
     input_shape = (28, 28, 1)
-    log_filepath = r'./lenet_retrain_logs/'
+    log_filepath = r'./vinyals_logs/'
 
     data = dataset.OmniglotDataset(validation_size=1000, seed=seed)
     data.load()
@@ -82,7 +107,7 @@ if __name__ == "__main__":
                         steps_per_epoch=iterations,
                         epochs=epochs,
                         callbacks=cbks,
-                        validation_data=(x_val[:200], y_val[:200]))
+                        validation_data=(x_val, y_val))
 
-    model.save('{}_lenet_omniglot.h5'.format(time.time()))
+    model.save('{}_vinyals_omniglot.h5'.format(time.time()))
     print("Final evaluation on val set: {}".format(model.evaluate(x_val, y_val)))
diff --git a/skluc/examples/tasks/classification/omniglot/omniglot_32x32_vgg19.py b/skluc/examples/tasks/classification/omniglot/scratch/omniglot_32x32_vgg19.py
similarity index 98%
rename from skluc/examples/tasks/classification/omniglot/omniglot_32x32_vgg19.py
rename to skluc/examples/tasks/classification/omniglot/scratch/omniglot_32x32_vgg19.py
index ec9d6ab67d915e272c9f1ca796e18d969edddb48..65cca168d42dc4e6a910b93753607b79bbd1f555 100644
--- a/skluc/examples/tasks/classification/omniglot/omniglot_32x32_vgg19.py
+++ b/skluc/examples/tasks/classification/omniglot/scratch/omniglot_32x32_vgg19.py
@@ -140,11 +140,11 @@ if __name__ == "__main__":
     seed = 0
     num_classes = 964
     batch_size = 128
-    epochs = 2000
+    epochs = 300
     dropout = 0.5
     weight_decay = 0.0001
     input_shape = (32, 32, 1)
-    log_filepath = r'./vgg19_retrain_logs/'
+    log_filepath = r'./vgg19_logs/'
 
     data = dataset.OmniglotDataset(validation_size=1000, seed=seed)
     data.load()
@@ -177,5 +177,5 @@ if __name__ == "__main__":
                         callbacks=cbks,
                         validation_data=(x_val[:200], y_val[:200]))
 
-    model.save('{}_vgg19_omniglot.h5'.format(time.time()))
+    model.save('{}_vgg19_omniglot_32x32.h5'.format(time.time()))
     print("Final evaluation on val set: {}".format(model.evaluate(x_val, y_val)))
diff --git a/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_lenet.py b/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_lenet.py
index a94bcc14472d07848292c57a0ae64c763a42946a..1e9d10a860845d3bb01dcf4cbbbc8ac1f6683733 100644
--- a/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_lenet.py
+++ b/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_lenet.py
@@ -15,6 +15,7 @@ from keras.optimizers import Adam
 from skluc.main.data.mldatasets import OmniglotDataset
 from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
 from skluc.main.tensorflow_.utils import batch_generator
+from skluc.main.utils import logger
 
 
 def W_init(shape, name=None):
@@ -66,6 +67,7 @@ def model_definition():
 
 
 if __name__ == "__main__":
+    logger.debug("Executing file {}".format(__file__))
     input_shape = (28, 28, 1)
     validation_size = 1000
     n_epoch = 200
@@ -121,4 +123,4 @@ if __name__ == "__main__":
 
     timing = time.time()
     siamese_net.save('{}_siamese_lenet_omniglot_full.h5'.format(timing))
-    siamese_net.save('{}_siamese_lenet_omniglot_conv.h5'.format(timing))
+    convnet_model.save('{}_siamese_lenet_omniglot_conv.h5'.format(timing))
diff --git a/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_vgg19_less_pool.py b/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_vgg19_less_pool.py
new file mode 100644
index 0000000000000000000000000000000000000000..9951c0c2413bd68c3f84ece5002f8b8dd9178329
--- /dev/null
+++ b/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_vgg19_less_pool.py
@@ -0,0 +1,220 @@
+import time
+
+# import dill as pickle
+import keras
+import numpy as np
+import numpy.random as rng
+from keras import backend as K
+from keras.callbacks import TensorBoard
+from keras.initializers import he_normal
+from keras.layers import Conv2D, MaxPooling2D
+from keras.layers import Dense, Dropout, Activation, Flatten
+from keras.layers import Input, Lambda
+from keras.layers.normalization import BatchNormalization
+from keras.models import Model
+from keras.models import Sequential
+from keras.optimizers import Adam
+
+from skluc.main.data.mldatasets import OmniglotDataset
+from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
+from skluc.main.tensorflow_.utils import batch_generator
+from skluc.main.utils import logger
+
+
+def W_init(shape, name=None):
+    values = rng.normal(loc=0, scale=1e-2, size=shape)
+    return K.variable(values, name=name)
+
+
+def b_init(shape, name=None):
+    values = rng.normal(loc=0.5, scale=1e-2, size=shape)
+    return K.variable(values, name=name)
+
+
+def pairwise_output_shape(shapes):
+    shape1, shape2 = shapes
+    return shape1
+
+
+def build_vgg19_model():
+    model = Sequential()
+
+    # Block 1
+    model.add(Conv2D(64, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block1_conv1', input_shape=input_shape))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(64, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block1_conv2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool'))
+
+    # Block 2
+    model.add(Conv2D(128, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block2_conv1'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(128, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block2_conv2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool'))
+
+    # Block 3
+    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block3_conv1'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block3_conv2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block3_conv3'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block3_conv4'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool'))
+
+    # Block 4
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block4_conv1'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block4_conv2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block4_conv3'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block4_conv4'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool'))
+
+    # Block 5
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block5_conv1'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block5_conv2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block5_conv3'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay),
+                     kernel_initializer=he_normal(), name='block5_conv4'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+
+    # classifier head (adapted from a CIFAR-10 VGG19 implementation; layer names kept for checkpoint compatibility)
+    model.add(Flatten(name='flatten'))
+    model.add(Dense(512, use_bias=True, kernel_regularizer=keras.regularizers.l2(weight_decay),
+                    kernel_initializer=he_normal(), name='fc_cifa10'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Dropout(dropout))
+    model.add(
+        Dense(512, kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='fc2'))
+    model.add(BatchNormalization())
+    model.add(Activation('relu'))
+    model.add(Dropout(dropout))
+
+    return model
+
+
+def model_definition():
+    x1 = Input(input_shape)
+    x2 = Input(input_shape)
+
+    convnet_model = build_vgg19_model()
+
+    repr_x1 = convnet_model(x1)
+    repr_x2 = convnet_model(x2)
+
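+    # Siamese head: component-wise L1 distance between the two embeddings, scored by a single sigmoid unit (1 = same class).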
+    pairwise_l1_dis = Lambda(lambda x: K.abs(x[0] - x[1]), output_shape=pairwise_output_shape)([repr_x1, repr_x2])
+    prediction = Dense(1, activation="sigmoid", bias_initializer=b_init)(pairwise_l1_dis)
+
+    siamese_net = Model(inputs=[x1, x2], outputs=prediction)
+
+    optimizer = Adam(6e-5)
+    siamese_net.compile(loss="binary_crossentropy", optimizer=optimizer, metrics=['accuracy'])
+
+    return convnet_model, siamese_net
+
+
+if __name__ == "__main__":
+    logger.debug("Executing file {}".format(__file__))
+    input_shape = (28, 28, 1)
+    validation_size = 1000
+    n_epoch = 200
+    batch_size = 128
+    eval_every = 10
+    nb_pairs = 30000
+    weight_decay = 0.0001
+    dropout = 0.5
+
+    tensorboard_path = "./siamese_vgg19_28x28_tensorboard"
+
+    convnet_model, siamese_net = model_definition()
+
+    tb_cb = TensorBoard(log_dir=tensorboard_path, histogram_freq=0)
+    cbks = [tb_cb]
+
+    data = OmniglotDataset(validation_size=validation_size, seed=0)
+    data.load()
+    data.normalize()
+    data.to_one_hot()
+    data.data_astype(np.float32)
+    data.labels_astype(np.float32)
+    data.to_image()
+    resize_trans = ResizeTransformer(data.s_name, output_shape=input_shape[:-1])
+    data.apply_transformer(resize_trans)
+
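+    # Build a balanced verification training set: nb_pairs // 2 same-class pairs (label 1) and as many
+    # different-class pairs (label 0); after swapaxes, x_train has shape (nb_pairs, 2, 28, 28, 1).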
+    same_pairs = data.get_n_pairs(data.train.labels, True, nb_pairs // 2)
+    x_same, y_same = [data.train.data[same_pairs[:, 0]], data.train.data[same_pairs[:, 1]]], np.ones(nb_pairs // 2)
+    diff_pairs = data.get_n_pairs(data.train.labels, False, nb_pairs // 2)
+    x_diff, y_diff = [data.train.data[diff_pairs[:, 0]], data.train.data[diff_pairs[:, 1]]], np.zeros(nb_pairs // 2)
+
+    x_train, y_train = np.array([np.vstack((x_same[0], x_diff[0])), np.vstack((x_same[1], x_diff[1]))]), np.hstack(
+        (y_same, y_diff))
+    x_train = np.swapaxes(x_train, 0, 1)
+    shuffled_indexes = np.random.permutation(len(y_train))
+
+    i = 0
+    while i < n_epoch:
+        for X_batch, Y_batch in batch_generator(x_train[shuffled_indexes], y_train[shuffled_indexes], batch_size,
+                                                False):
+            loss = siamese_net.train_on_batch([X_batch[:, 0], X_batch[:, 1]], Y_batch)
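+        # Every eval_every epochs, evaluate on a freshly sampled balanced batch of validation pairs.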
+        if i % eval_every == 0:
+            same_pairs = data.get_n_pairs(data.validation.labels, True, batch_size // 2)
+            x_same, y_same = [data.validation.data[same_pairs[:, 0]], data.validation.data[same_pairs[:, 1]]], np.ones(
+                batch_size // 2)
+            diff_pairs = data.get_n_pairs(data.validation.labels, False, batch_size // 2)
+            x_diff, y_diff = [data.validation.data[diff_pairs[:, 0]], data.validation.data[diff_pairs[:, 1]]], np.zeros(
+                batch_size // 2)
+
+            x_batch, y_batch = [np.vstack((x_same[0], x_diff[0])), np.vstack((x_same[1], x_diff[1]))], np.hstack(
+                (y_same, y_diff))
+
+            result = siamese_net.evaluate(x=x_batch, y=y_batch)
+            print("iteration {}/{}, training loss: {}, eval: {}".format(i + 1, n_epoch, str(loss), str(result)))
+        i += 1
+
+    timing = time.time()
+    siamese_net.save('{}_siamese_vgg19_less_pool_omniglot_28x28_full.h5'.format(timing))
+    convnet_model.save('{}_siamese_vgg19_less_pool_omniglot_28x28_conv.h5'.format(timing))
diff --git a/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_simple.py b/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_vinyals.py
similarity index 72%
rename from skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_simple.py
rename to skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_vinyals.py
index 2a1734b309d968833bb77586f53643452136b079..641240a561bca2bd6a156988f219178af8a60fd3 100644
--- a/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_simple.py
+++ b/skluc/examples/tasks/verification/omniglot/omniglot_28x28_siamese_networks_vinyals.py
@@ -5,17 +5,17 @@ import numpy as np
 import numpy.random as rng
 from keras import backend as K
 from keras.callbacks import TensorBoard
-from keras.layers import Conv2D, MaxPooling2D
+from keras.layers import Conv2D, MaxPooling2D, BatchNormalization, Activation
 from keras.layers import Dense, Flatten
 from keras.layers import Input, Lambda
 from keras.models import Model
 from keras.models import Sequential
 from keras.optimizers import Adam
-from keras.regularizers import l2
 
 from skluc.main.data.mldatasets import OmniglotDataset
 from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
 from skluc.main.tensorflow_.utils import batch_generator
+from skluc.main.utils import logger
 
 
 def W_init(shape, name=None):
@@ -33,31 +33,43 @@ def pairwise_output_shape(shapes):
     return shape1
 
 
-def build_simple_model():
-    convnet_model = Sequential()
-    # conv2D takes a function with shape and name arguments for initialization
-    convnet_model.add(Conv2D(64, (10, 10), padding="same", activation="relu", input_shape=input_shape,
-                             kernel_initializer=W_init, kernel_regularizer=l2(2e-4), bias_initializer=b_init))
-    convnet_model.add(MaxPooling2D())
-    convnet_model.add(Conv2D(128, (7, 7), padding="same", activation="relu",
-                             kernel_initializer=W_init, kernel_regularizer=l2(2e-4), bias_initializer=b_init))
-    convnet_model.add(MaxPooling2D())
-    convnet_model.add(Conv2D(128, (4, 4), padding="same", activation="relu",
-                             kernel_initializer=W_init, kernel_regularizer=l2(2e-4), bias_initializer=b_init))
-    convnet_model.add(MaxPooling2D())
-    convnet_model.add(Conv2D(256, (4, 4), padding="same", activation="relu",
-                             kernel_initializer=W_init, kernel_regularizer=l2(2e-4), bias_initializer=b_init))
-    convnet_model.add(Flatten())
-    convnet_model.add(Dense(4096, activation="sigmoid",
-                            kernel_initializer=W_init, kernel_regularizer=l2(1e-3), bias_initializer=b_init))
-    return convnet_model
+def build_vinyals_model():
+    model = Sequential()
+
+    model.add(
+        Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal', input_shape=input_shape))
+    model.add(BatchNormalization())
+    model.add(MaxPooling2D((2, 2), strides=(2, 2)))
+    model.add(Activation("relu"))
+
+    model.add(
+        Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal'))
+    model.add(BatchNormalization())
+    model.add(MaxPooling2D((2, 2), strides=(2, 2)))
+    model.add(Activation("relu"))
+
+    model.add(
+        Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal'))
+    model.add(BatchNormalization())
+    model.add(MaxPooling2D((2, 2), strides=(2, 2)))
+    model.add(Activation("relu"))
+
+    model.add(
+        Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal'))
+    model.add(BatchNormalization())
+    model.add(MaxPooling2D((2, 2), strides=(2, 2)))
+    model.add(Activation("relu"))
+
+    model.add(Flatten())
+
+    return model
 
 
 def model_definition():
     x1 = Input(input_shape)
     x2 = Input(input_shape)
 
-    convnet_model = build_simple_model()
+    convnet_model = build_vinyals_model()
 
     repr_x1 = convnet_model(x1)
     repr_x2 = convnet_model(x2)
@@ -74,6 +86,7 @@ def model_definition():
 
 
 if __name__ == "__main__":
+    logger.debug("Executing file {}".format(__file__))
     input_shape = (28, 28, 1)
     validation_size = 1000
     n_epoch = 200
@@ -128,5 +141,5 @@ if __name__ == "__main__":
         i += 1
 
     timing = time.time()
-    siamese_net.save('{}_siamese_simple_omniglot_full.h5'.format(timing))
-    siamese_net.save('{}_siamese_simple_omniglot_conv.h5'.format(timing))
+    siamese_net.save('{}_siamese_vinyals_omniglot_full.h5'.format(timing))
+    convnet_model.save('{}_siamese_vinyals_omniglot_conv.h5'.format(timing))
diff --git a/skluc/examples/tasks/verification/omniglot/omniglot_32x32_siamese_networks_vgg19.py b/skluc/examples/tasks/verification/omniglot/omniglot_32x32_siamese_networks_vgg19.py
index ae40ca229dbdd8014594063f5ae138b74cbb67e0..a24697a8f67238e5ec9e28506ba73c33f6191594 100644
--- a/skluc/examples/tasks/verification/omniglot/omniglot_32x32_siamese_networks_vgg19.py
+++ b/skluc/examples/tasks/verification/omniglot/omniglot_32x32_siamese_networks_vgg19.py
@@ -18,6 +18,7 @@ from keras.optimizers import Adam
 from skluc.main.data.mldatasets import OmniglotDataset
 from skluc.main.data.transformation.ResizeTransformer import ResizeTransformer
 from skluc.main.tensorflow_.utils import batch_generator
+from skluc.main.utils import logger
 
 
 def W_init(shape, name=None):
@@ -158,6 +159,7 @@ def model_definition():
 
 
 if __name__ == "__main__":
+    logger.debug("Executing file {}".format(__file__))
     input_shape = (32, 32, 1)
     validation_size = 1000
     n_epoch = 200
@@ -213,4 +215,4 @@ if __name__ == "__main__":
 
     timing = time.time()
     siamese_net.save('{}_siamese_vgg19_omniglot_full.h5'.format(timing))
-    siamese_net.save('{}_siamese_vgg19_omniglot_conv.h5'.format(timing))
+    convnet_model.save('{}_siamese_vgg19_omniglot_conv.h5'.format(timing))
diff --git a/skluc/main/data/mldatasets/Dataset.py b/skluc/main/data/mldatasets/Dataset.py
index 807771c2c4d572907eec798608b96bfe449b11a6..1aa84595cbe60dcaf4e833b45bacde2f0d82a785 100644
--- a/skluc/main/data/mldatasets/Dataset.py
+++ b/skluc/main/data/mldatasets/Dataset.py
@@ -39,7 +39,8 @@ class Dataset(object):
         kept_indices = self.get_uniform_class_rand_indices_train(new_size)
         self.permuted_index_train = self.permuted_index_train[kept_indices]
 
-    def get_bool_idx_label(self, find_label, labels):
+    @staticmethod
+    def get_bool_idx_label(find_label, labels):
         """
         return the np.array bool where find_label == label in labels
         :param find_label:
diff --git a/skluc/main/data/mldatasets/OmniglotDataset.py b/skluc/main/data/mldatasets/OmniglotDataset.py
index 795f1de27b0a8a7de1c37d5fb24f2fb568ddd3c2..e7141fba79d23cab85037232bff584bdbfd8ae59 100644
--- a/skluc/main/data/mldatasets/OmniglotDataset.py
+++ b/skluc/main/data/mldatasets/OmniglotDataset.py
@@ -109,6 +109,8 @@ class OmniglotDataset(ImageDataset):
             self._check_validation_size(self._train[0].shape[0])
 
             self.save_npz()
+        logger.debug("Number of labels in train set {}".format(len(np.unique(self._train.labels, axis=0))))
+        logger.debug("Number of labels in evaluation set {}".format(len(np.unique(self._test.labels, axis=0))))
 
 
 if __name__ == "__main__":
@@ -116,6 +118,6 @@
 
     d = OmniglotDataset(validation_size=10000)
     d.load()
     d.to_image()
     d.normalize()
     for i, im in enumerate(d.train.data):
diff --git a/skluc/main/data/transformation/KerasModelTransformer.py b/skluc/main/data/transformation/KerasModelTransformer.py
index 1aa35ea69b74ed57f268697b86348066f1eb8000..9108ff4d522db8a897dd49edb7b48474bb88e0f3 100644
--- a/skluc/main/data/transformation/KerasModelTransformer.py
+++ b/skluc/main/data/transformation/KerasModelTransformer.py
@@ -1,4 +1,5 @@
 import os
+
 import numpy as np
 
 from skluc.main.data.transformation.Transformer import Transformer
@@ -6,6 +7,7 @@ from skluc.main.utils import check_file_md5, logger
 
 
 class KerasModelTransformer(Transformer):
+    # todo vgg19 and lecun don't change anything
 
     MAP_DATA_MODEL = {}
 
diff --git a/skluc/main/data/transformation/LeCunTransformer.py b/skluc/main/data/transformation/LeCunTransformer.py
index 92e0ddffdb19142bdcf420cfabe2dfa3408df627..39bc52b7580ea8206421f823e51cb988356644bd 100644
--- a/skluc/main/data/transformation/LeCunTransformer.py
+++ b/skluc/main/data/transformation/LeCunTransformer.py
@@ -1,12 +1,11 @@
-from keras.models import load_model
-
 from keras import Model
+from keras.models import load_model
 
 from skluc.main.data.transformation.KerasModelTransformer import KerasModelTransformer
-from skluc.main.utils import logger, create_directory, download_data, check_file_md5, DownloadableModel
+from skluc.main.utils import logger, create_directory, download_data, check_file_md5, DownloadableModel, Singleton
 
 
-class LecunTransformer(KerasModelTransformer):
+class LecunTransformer(KerasModelTransformer, metaclass=Singleton):
     """
     Uses the lenet network to transform input data.
     """
@@ -15,12 +14,34 @@ class LecunTransformer(KerasModelTransformer):
         "mnist": DownloadableModel(
             url="https://pageperso.lis-lab.fr/~luc.giffon/models/1524640419.938414_lenet_mnist.h5",
             checksum="527d7235c213278df1d15d3fe685eb5c"),
+        "siamese_omniglot_28x28": DownloadableModel(
+            url="https://pageperso.lis-lab.fr/~luc.giffon/models/1536239708.891906_siamese_lenet_omniglot_conv.h5",
+            checksum="5092edcb0be7b31b808e221afcede3e6"
+        ),
+        "omniglot_28x28": DownloadableModel(
+            url="https://pageperso.lis-lab.fr/~luc.giffon/models/1536750152.6389275_lenet_omniglot_28x28.h5",
+            checksum="c4f20b6dae0722234e1ec0bee85e3a4d"
+        )
     }
 
-    def __init__(self, data_name):
+    def __init__(self, data_name, cut_layer_name=None, cut_layer_index=None):
         if data_name not in self.MAP_DATA_MODEL.keys():
             raise ValueError("Unknown data name. Can't load weights")
-        transformation_name = self.__class__.__name__
+
+        if cut_layer_name is None and cut_layer_index is None:
+            logger.warning(
+                "No cut layer specified; defaulting to index -1. Pass cut_layer_index=-1 explicitly, as this fallback may be removed.")
+            cut_layer_index = -1
+        if cut_layer_name is not None:
+            transformation_name = str(data_name) + "_" + self.__class__.__name__ + "_" + str(cut_layer_name)
+        elif cut_layer_index is not None:
+            transformation_name = str(data_name) + "_" + self.__class__.__name__ \
+                                  + "_" + str(cut_layer_index)
+            # TODO: save index and name in the same directory when they refer to the same layer
+        else:
+            raise AttributeError("Either cut_layer_name or cut_layer_index must be given to init LecunTransformer.")
+        self.__cut_layer_name = cut_layer_name
+        self.__cut_layer_index = cut_layer_index
 
         self.keras_model = None
 
@@ -32,10 +53,24 @@ class LecunTransformer(KerasModelTransformer):
         s_model_path = download_data(self.MAP_DATA_MODEL[self.data_name].url, self.s_download_dir)
         check_file_md5(s_model_path, self.MAP_DATA_MODEL[self.data_name].checksum)
         if self.keras_model is None:
-            logger.debug("Loading Lecun model with {} weights".format(self.data_name))
+            logger.debug("Loading {} model for {} transformation with {} weights".format(self.__class__.__name__,
+                                                                                         self.transformation_name,
+                                                                                         self.data_name))
             self.keras_model = load_model(s_model_path)
 
+            logger.debug("Layers of model {}".format([l.name for l in self.keras_model.layers]))
+
+            if self.__cut_layer_index is not None:
+                cut_layer = self.keras_model.layers[self.__cut_layer_index]
+                self.__cut_layer_name = cut_layer.name
+                logger.debug(
+                    "Found layer {} associated with layer index {}".format(self.__cut_layer_name, self.__cut_layer_index))
+
             self.keras_model = Model(inputs=self.keras_model.input,
-                                     outputs=self.keras_model.get_layer('conv_pool_2').output)
+                                     outputs=self.keras_model.get_layer(name=self.__cut_layer_name).output)
+
         else:
-            logger.debug("Skip loading model Lecun model with {} weights. Already there.".format(self.data_name))
+            logger.debug("Skip loading model {} for {} transformation with {} weights. Already there.".format(
+                self.__class__.__name__,
+                self.transformation_name,
+                self.data_name))
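
A usage sketch of the extended LecunTransformer constructor ("dense_1" is an illustrative layer name, not necessarily present in the downloaded model; actual names are printed by the "Layers of model" debug line in load()):

    from skluc.main.data.transformation.LeCunTransformer import LecunTransformer

    # Cut at a named layer, or pass cut_layer_index instead. NB: the class now
    # uses the Singleton metaclass, so repeated construction may return a
    # cached instance, depending on how skluc's Singleton is keyed.
    transformer = LecunTransformer("omniglot_28x28", cut_layer_name="dense_1")
    transformer.load()  # downloads the weights, checks md5, truncates the model
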
diff --git a/skluc/main/data/transformation/VGG19Transformer/__init__.py b/skluc/main/data/transformation/VGG19Transformer/__init__.py
index 9af31f4a586997f649249023de024d51faa7fd81..cfbbdde0cea5d83c0b65aac458c84cfe5d2b03d8 100644
--- a/skluc/main/data/transformation/VGG19Transformer/__init__.py
+++ b/skluc/main/data/transformation/VGG19Transformer/__init__.py
@@ -14,31 +14,42 @@ class VGG19Transformer(KerasModelTransformer, metaclass=Singleton):
     MAP_DATA_MODEL = {
         "svhn": DownloadableModel(
             url="https://pageperso.lis-lab.fr/~luc.giffon/models/1529968150.5454917_vgg19_svhn.h5",
-            checksum="563a9ec2aad37459bd1ed0e329441b05"),
+            checksum="563a9ec2aad37459bd1ed0e329441b05"
+        ),
         "cifar100": DownloadableModel(
             url="https://pageperso.lis-lab.fr/~luc.giffon/models/1530965727.781668_vgg19_cifar100fine.h5",
-            checksum="edf43e263fec05e2c013dd5a2128fc38"),
+            checksum="edf43e263fec05e2c013dd5a2128fc38"
+        ),
         "cifar10": DownloadableModel(
             url="https://pageperso.lis-lab.fr/~luc.giffon/models/1522967518.1916964_vgg19_cifar10.h5",
-            checksum="0dbb4f02ceb1f4acb6e24831758106e5")
+            checksum="0dbb4f02ceb1f4acb6e24831758106e5"
+        ),
+        "siamese_omniglot_28x28": DownloadableModel(
+            url="https://pageperso.lis-lab.fr/~luc.giffon/models/1536244775.6502118_siamese_vgg19_omniglot_28x28_conv.h5",
+            checksum="90aec06e688ec3248ba89544a10c9f1f"
+        ),
+        "omniglot_28x28": DownloadableModel(
+            url="https://pageperso.lis-lab.fr/~luc.giffon/models/1536764034.66037_vgg19_omniglot.h5",
+            checksum="ef1272e9c7ce070e8f70889ec58d1c33"
+        )
     }
 
     def __init__(self, data_name, cut_layer_name=None, cut_layer_index=None):
         if data_name not in self.MAP_DATA_MODEL.keys():
             raise ValueError("Unknown data name. Can't load weights")
-        else:
-            data_name = data_name
 
         if cut_layer_name is None and cut_layer_index is None:
+            logger.warning(
+                "No cut layer specified; defaulting to layer 'block5_pool'. Pass cut_layer_name='block5_pool' explicitly, as this fallback may be removed.")
             cut_layer_name = "block5_pool"
         if cut_layer_name is not None:
-            transformation_name = self.__class__.__name__ + "_" + str(cut_layer_name)
+            transformation_name = str(data_name) + "_" + self.__class__.__name__ + "_" + str(cut_layer_name)
         elif cut_layer_index is not None:
-            transformation_name = self.__class__.__name__ \
-                                       + "_" + str(cut_layer_index)
+            transformation_name = str(data_name) + "_" + self.__class__.__name__ \
+                                  + "_" + str(cut_layer_index)
             # TODO: save index and name in the same directory when they refer to the same layer
         else:
-            raise AttributeError("Cut layer name or cut_layer index must be given to init VGG19Cifar10Transformer.")
+            raise AttributeError("Cut layer name or cut_layer index must be given to init VGG19Transformer.")
         self.__cut_layer_name = cut_layer_name
         self.__cut_layer_index = cut_layer_index
 
@@ -55,15 +66,21 @@ class VGG19Transformer(KerasModelTransformer, metaclass=Singleton):
             logger.debug("Loading VGG19 model for {} transformation with {} weights".format(self.transformation_name, self.data_name))
             self.keras_model = load_model(s_model_path)
 
-            if self.__cut_layer_name is not None:
-                self.keras_model = Model(inputs=self.keras_model.input,
-                                            outputs=self.keras_model.get_layer(name=self.__cut_layer_name).output)
-            elif self.__cut_layer_index is not None:
-                self.keras_model = Model(inputs=self.keras_model.input,
-                                            outputs=self.keras_model.get_layer(name=self.__cut_layer_index).output)
+            logger.debug("Layers of model {}".format([l.name for l in self.keras_model.layers]))
+
+            if self.__cut_layer_index is not None:
+                cut_layer = self.keras_model.layers[self.__cut_layer_index]
+                self.__cut_layer_name = cut_layer.name
+                logger.debug(
+                    "Found layer {} associated with layer index {}".format(self.__cut_layer_name, self.__cut_layer_index))
+
+            self.keras_model = Model(inputs=self.keras_model.input,
+                                     outputs=self.keras_model.get_layer(name=self.__cut_layer_name).output)
 
         else:
-            logger.debug("Skip loading model VGG19 for {} transformation with {} weights. Already there.".format(self.transformation_name, self.data_name))
+            logger.debug("Skip loading model VGG19 for {} transformation with {} weights. Already there.".format(
+                self.transformation_name,
+                self.data_name))
 
 
 if __name__ == '__main__':
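
All three load() implementations in this diff share the same cutting step: wrap the loaded network in a new keras Model whose output is an intermediate layer. A standalone sketch of that technique on a toy model (layer names are illustrative):

    from keras import Model, Sequential
    from keras.layers import Dense

    full = Sequential()
    full.add(Dense(8, input_shape=(4,), name="hidden"))
    full.add(Dense(2, name="head"))

    # Truncate at "hidden": the new model maps inputs to that layer's
    # activations, exactly like the get_layer(...).output calls above.
    truncated = Model(inputs=full.input, outputs=full.get_layer(name="hidden").output)
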
diff --git a/skluc/main/data/transformation/VinyalsTransformer.py b/skluc/main/data/transformation/VinyalsTransformer.py
new file mode 100644
index 0000000000000000000000000000000000000000..9658af62523d6909d0ae0fda67d30cd3d2587c12
--- /dev/null
+++ b/skluc/main/data/transformation/VinyalsTransformer.py
@@ -0,0 +1,73 @@
+from keras import Model
+from keras.models import load_model
+
+from skluc.main.data.transformation.KerasModelTransformer import KerasModelTransformer
+from skluc.main.utils import logger, create_directory, download_data, check_file_md5, DownloadableModel, Singleton
+
+
+class VinyalsTransformer(KerasModelTransformer, metaclass=Singleton):
+    """
+    Uses the Vinyals network to transform input data.
+    """
+
+    MAP_DATA_MODEL = {
+        "siamese_omniglot_28x28": DownloadableModel(
+            url="https://pageperso.lis-lab.fr/~luc.giffon/models/1536240331.3177369_siamese_vinyals_omniglot_conv.h5",
+            checksum="a0b815ad2ab81092c75d129f511b2bdb"
+        ),
+        "omniglot_28x28": DownloadableModel(
+            url="https://pageperso.lis-lab.fr/~luc.giffon/models/1536742266.9412131_vinyals_omniglot.h5",
+            checksum="6460eb1b7eaa478301a281b12ecd2461"
+        )
+    }
+
+    def __init__(self, data_name, cut_layer_name=None, cut_layer_index=None):
+        if data_name not in self.MAP_DATA_MODEL.keys():
+            raise ValueError("Unknown data name. Can't load weights")
+
+        if cut_layer_name is None and cut_layer_index is None:
+            logger.warning(
+                "No cut layer specified; defaulting to index -1. Pass cut_layer_index=-1 explicitly, as this fallback may be removed.")
+            cut_layer_index = -1
+        if cut_layer_name is not None:
+            transformation_name = str(data_name) + "_" + self.__class__.__name__ + "_" + str(cut_layer_name)
+        elif cut_layer_index is not None:
+            transformation_name = str(data_name) + "_" + self.__class__.__name__ \
+                                  + "_" + str(cut_layer_index)
+            # TODO: save index and name in the same directory when they refer to the same layer
+        else:
+            raise AttributeError("Either cut_layer_name or cut_layer_index must be given to init VinyalsTransformer.")
+        self.__cut_layer_name = cut_layer_name
+        self.__cut_layer_index = cut_layer_index
+
+        self.keras_model = None
+
+        super().__init__(data_name=data_name,
+                         transformation_name=transformation_name)
+
+    def load(self):
+        create_directory(self.s_download_dir)
+        s_model_path = download_data(self.MAP_DATA_MODEL[self.data_name].url, self.s_download_dir)
+        check_file_md5(s_model_path, self.MAP_DATA_MODEL[self.data_name].checksum)
+        if self.keras_model is None:
+            logger.debug("Loading {} model for {} transformation with {} weights".format(self.__class__.__name__,
+                                                                                         self.transformation_name,
+                                                                                         self.data_name))
+            self.keras_model = load_model(s_model_path)
+
+            logger.debug("Layers of model {}".format([l.name for l in self.keras_model.layers]))
+
+            if self.__cut_layer_index is not None:
+                cut_layer = self.keras_model.layers[self.__cut_layer_index]
+                self.__cut_layer_name = cut_layer.name
+                logger.debug(
+                    "Found layer {} associated with layer index {}".format(self.__cut_layer_name, self.__cut_layer_index))
+
+            self.keras_model = Model(inputs=self.keras_model.input,
+                                     outputs=self.keras_model.get_layer(name=self.__cut_layer_name).output)
+
+        else:
+            logger.debug("Skip loading model {} for {} transformation with {} weights. Already there.".format(
+                self.__class__.__name__,
+                self.transformation_name,
+                self.data_name))
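
VinyalsTransformer, like the other transformers in this diff, relies on the Singleton metaclass imported from skluc.main.utils, whose definition is not part of this patch. A minimal sketch of such a metaclass, assuming one cached instance per class (the real implementation might instead key on constructor arguments):

    class Singleton(type):
        """Metaclass caching a single instance per class (assumed behaviour)."""
        _instances = {}

        def __call__(cls, *args, **kwargs):
            if cls not in cls._instances:
                cls._instances[cls] = super().__call__(*args, **kwargs)
            return cls._instances[cls]
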
diff --git a/skluc/main/tools/experiences/cluger.py b/skluc/main/tools/experiences/cluger.py
index 6c02a21057c3868a686102d64c212591d7436917..9729850c037114c112bcdc82d9174afe45fc5246 100644
--- a/skluc/main/tools/experiences/cluger.py
+++ b/skluc/main/tools/experiences/cluger.py
@@ -24,8 +24,8 @@ import time
 
 import docopt
 
-from skluc.main.tools import process_script_params, run as executioner
-from skluc.main.tools import oarcmd
+from skluc.main.tools.experiences.executioner import process_script_params, run as executioner
+from skluc.main.tools.experiences.oarCmdGenerator import oarcmd
 
 if __name__ == '__main__':
     arguments = docopt.docopt(__doc__)
diff --git a/skluc/main/tools/experiences/executioner.py b/skluc/main/tools/experiences/executioner.py
index 8ad5a2c574d386b35a8705ddf02198c19b26987f..159ec37d050b17a92130ff847ccdfc019367ec16 100644
--- a/skluc/main/tools/experiences/executioner.py
+++ b/skluc/main/tools/experiences/executioner.py
@@ -38,7 +38,7 @@ from contextlib import redirect_stdout
 import docopt
 import pip
 
-from skluc.main.tools import oarcmd
+from skluc.main.tools.experiences.oarCmdGenerator import oarcmd
 from skluc.main.utils import create_directory, logger