From 8bdf07cfa5c80fda5487f554ed9885fc69064b20 Mon Sep 17 00:00:00 2001
From: Luc Giffon <luc.giffon@lis-lab.fr>
Date: Thu, 29 Nov 2018 19:27:37 +0100
Subject: [PATCH] continue setting up the keras end-to-end experiment

add one gamma value (0.1) to the grid search and enable the deepstrom_gamma runs
the script now supports multiple kernels (linear, rbf, chi2_cpd, chi2_exp_cpd) instead of only chi2
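
The kernel is now chosen by name through map_kernel_name_function, a mapping
from kernel names to Keras-backend kernel functions; each branch in the script
only differs in the extra keyword arguments (epsilon, tanh_activation,
normalize) merged with paraman["kernel_dict"]. A minimal sketch of the assumed
pattern (the helper below is illustrative, not the actual skluc implementation):

    import keras.backend as K

    def keras_linear_kernel(args, normalize=True):
        # the Keras Lambda layer passes the [x, subsample] pair as one argument
        x, y = args
        if normalize:
            x = K.l2_normalize(x, axis=-1)
            y = K.l2_normalize(y, axis=-1)
        # pairwise dot products between batch points and the Nystrom subsample
        return K.dot(x, K.transpose(y))

    map_kernel_name_function = {"linear": keras_linear_kernel}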
---
 ...file_classif_end_to_end_with_augment_keras.yml |  4 ++--
 .../deepstrom_classif_end_to_end.py               | 15 +++++++++++++--
 2 files changed, 15 insertions(+), 4 deletions(-)

diff --git a/main/experiments/parameter_files/november_2018/classif_end_to_end_with_augment/lazyfile_classif_end_to_end_with_augment_keras.yml b/main/experiments/parameter_files/november_2018/classif_end_to_end_with_augment/lazyfile_classif_end_to_end_with_augment_keras.yml
index 3a481a9..1eb1c8e 100644
--- a/main/experiments/parameter_files/november_2018/classif_end_to_end_with_augment/lazyfile_classif_end_to_end_with_augment_keras.yml
+++ b/main/experiments/parameter_files/november_2018/classif_end_to_end_with_augment/lazyfile_classif_end_to_end_with_augment_keras.yml
@@ -2,7 +2,7 @@ all:
   dense:
   deepfried:
   deepstrom_no_gamma:
-#  deepstrom_gamma:
+  deepstrom_gamma:
 
 base:
   epoch_numbers: {"-e": [200]}
@@ -18,7 +18,7 @@ dense:
   repr_dim: {"-D": [16, 64, 128, 1024]}
 
 gamma:
-  gamma: {"-g": [0.001, 0.005, 0.01, 0.05]}
+  gamma: {"-g": [0.001, 0.005, 0.01, 0.05, 0.1]}
 
 deepfried:
   network: ["deepfriedconvnet"]
diff --git a/main/experiments/scripts/november_2018/keras_end_to_end/deepstrom_classif_end_to_end.py b/main/experiments/scripts/november_2018/keras_end_to_end/deepstrom_classif_end_to_end.py
index 6e8385e..be1d657 100644
--- a/main/experiments/scripts/november_2018/keras_end_to_end/deepstrom_classif_end_to_end.py
+++ b/main/experiments/scripts/november_2018/keras_end_to_end/deepstrom_classif_end_to_end.py
@@ -62,7 +62,7 @@ from keras.optimizers import Adam
 from keras.preprocessing.image import ImageDataGenerator
 
 import skluc.main.data.mldatasets as dataset
-from skluc.main.keras_.kernel import keras_chi_square_CPD
+from skluc.main.keras_.kernel import map_kernel_name_function
 # from skluc.main.keras_.kernel_approximation.nystrom_layer import DeepstromLayerEndToEnd
 from skluc.main.keras_.kernel_approximation.fastfood_layer import FastFoodLayer
 from skluc.main.keras_.models import build_lenet_model, build_vgg19_model_glorot
@@ -262,7 +262,18 @@ def main(paraman: ParameterManagerMain, resman, printman):
 
         repr_sub = convnet_model(input_subsample_concat)
 
-        kernel_layer = Lambda(lambda *args, **kwargs: keras_chi_square_CPD(*args, epsilon=1e-8, **kwargs), output_shape=lambda shapes: (shapes[0][0], paraman["--nys-size"]))
+        if paraman["kernel"] == "linear":
+            kernel_function = lambda *args, **kwargs: map_kernel_name_function["linear"](*args, **kwargs, normalize=True, **paraman["kernel_dict"])
+        elif paraman["kernel"] == "rbf":
+            kernel_function = lambda *args, **kwargs: map_kernel_name_function["rbf"](*args, **kwargs, tanh_activation=True, normalize=True, **paraman["kernel_dict"])
+        elif paraman["kernel"] == "chi2_cpd":
+            kernel_function = lambda *args, **kwargs: map_kernel_name_function["chi2_cpd"](*args, **kwargs, epsilon=1e-8, tanh_activation=True, normalize=True, **paraman["kernel_dict"])
+        elif paraman["kernel"] == "chi2_exp_cpd":
+            kernel_function = lambda *args, **kwargs: map_kernel_name_function["chi2_exp_cpd"](*args, **kwargs, epsilon=1e-8, tanh_activation=True, normalize=True, **paraman["kernel_dict"])
+        else:
+            raise NotImplementedError(f"unknown kernel function {paraman['kernel']}")
+
+        kernel_layer = Lambda(kernel_function, output_shape=lambda shapes: (shapes[0][0], paraman["--nys-size"]))
         kernel_vector = kernel_layer([repr_x, repr_sub])
 
         input_classifier = Dense(paraman["--nys-size"], use_bias=False, activation='linear')(kernel_vector)  # 512 is the output dim of convolutional layers
-- 
GitLab