From 7dd4860c2647cee0a773e6eaa9168fef2917b7c7 Mon Sep 17 00:00:00 2001
From: Luc Giffon <luc.giffon@lis-lab.fr>
Date: Tue, 13 Nov 2018 08:45:13 +0100
Subject: [PATCH] Fix infinite loop over ImageDataGenerator.flow() batches

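Keras' ImageDataGenerator.flow() cycles over the training data
indefinitely, so the inner batch loop never returned control to the
epoch loop and training never terminated. Bound the loop explicitly:
enumerate the generated batches and break after roughly one pass over
the training set (n_samples / batch_size batches).

While at it, write TensorBoard summaries to a per-run directory keyed
by timestamp, dataset and Nystrom subsample size instead of the shared
"debug_classification_end_to_end" directory, so separate runs stay
distinguishable in TensorBoard.
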
---
 .../deepstrom_classif_end_to_end.py                    |  8 ++++----
 .../deepstrom_classif_end_to_end.py                    | 10 ++++++----
 2 files changed, 10 insertions(+), 8 deletions(-)

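Note for reviewers (this text sits below the "---" marker, so `git am`
discards it when applying): a minimal standalone sketch of the
bounded-iteration pattern both files now use, assuming Keras'
ImageDataGenerator. The dummy data and the horizontal_flip option are
illustrative only; the variable names mirror the scripts.

    import numpy as np
    from keras.preprocessing.image import ImageDataGenerator

    # Stand-ins for the scripts' training set and batch size.
    X_train = np.random.rand(128, 32, 32, 3).astype("float32")
    y_train = np.random.randint(0, 10, size=(128, 1))
    batch_size = 16

    datagen = ImageDataGenerator(horizontal_flip=True)
    batches_per_epoch = int(X_train.shape[0] / batch_size)

    # flow() yields batches forever; without an explicit bound the
    # loop below never terminates.
    for k, (X_batch, Y_batch) in enumerate(
            datagen.flow(X_train, y_train, batch_size=batch_size)):
        pass  # one training step on (X_batch, Y_batch) goes here
        if k > batches_per_epoch:
            break

The `>` comparison lets the loop run a batch or two past an exact
epoch; the bound only needs to be finite, not exact.
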
diff --git a/main/experiments/scripts/november_2018/end_to_end_with_2_layers_only_dense_with_augment/deepstrom_classif_end_to_end.py b/main/experiments/scripts/november_2018/end_to_end_with_2_layers_only_dense_with_augment/deepstrom_classif_end_to_end.py
index 9af6985..7e99250 100644
--- a/main/experiments/scripts/november_2018/end_to_end_with_2_layers_only_dense_with_augment/deepstrom_classif_end_to_end.py
+++ b/main/experiments/scripts/november_2018/end_to_end_with_2_layers_only_dense_with_augment/deepstrom_classif_end_to_end.py
@@ -247,7 +247,7 @@ def main(paraman, resman, printman):
 
     summary_writer = None
     if paraman["--tensorboard"]:
-        summary_writer = tf.summary.FileWriter("debug_classification_end_to_end")
+        summary_writer = tf.summary.FileWriter(f"log/{int(t.time())}/{paraman['dataset']}/nys_size_{paraman['--nys-size']}/")
 
     # In[7]:
 
@@ -262,8 +262,7 @@ def main(paraman, resman, printman):
         j = 0
         for i in range(paraman["--num-epoch"]):
             logger.debug(memory_usage())
-            k = 0
-            for X_batch, Y_batch in datagen.flow(X_train, y_train, batch_size=paraman["--batch-size"]):
+            for k, (X_batch, Y_batch) in enumerate(datagen.flow(X_train, y_train, batch_size=paraman["--batch-size"])):
                 if paraman["network"] == "deepstrom":
                     feed_dict = {x: X_batch, y: Y_batch, subs: nys_subsample}
                 else:
@@ -277,8 +276,9 @@ def main(paraman, resman, printman):
                                                                                                 acc))
                     if paraman["--tensorboard"]:
                         summary_writer.add_summary(summary_str, j)
-                k += 1
                 j += 1
+                if k > int(data.train[0].shape[0] / paraman["--batch-size"]):
+                    break
 
         logger.info("Evaluation on validation data")
         training_time = t.time() - global_start
diff --git a/main/experiments/scripts/november_2018/end_to_end_with_augment/deepstrom_classif_end_to_end.py b/main/experiments/scripts/november_2018/end_to_end_with_augment/deepstrom_classif_end_to_end.py
index 3208b9f..903d054 100644
--- a/main/experiments/scripts/november_2018/end_to_end_with_augment/deepstrom_classif_end_to_end.py
+++ b/main/experiments/scripts/november_2018/end_to_end_with_augment/deepstrom_classif_end_to_end.py
@@ -239,7 +239,7 @@ def main(paraman, resman, printman):
 
     summary_writer = None
     if paraman["--tensorboard"]:
-        summary_writer = tf.summary.FileWriter("debug_classification_end_to_end")
+        summary_writer = tf.summary.FileWriter(f"log/{int(t.time())}/{paraman['dataset']}/nys_size_{paraman['--nys-size']}/")
 
     # In[7]:
 
@@ -254,8 +254,8 @@ def main(paraman, resman, printman):
         j = 0
         for i in range(paraman["--num-epoch"]):
             logger.debug(memory_usage())
-            k = 0
-            for X_batch, Y_batch in datagen.flow(X_train, y_train, batch_size=paraman["--batch-size"]):
+
+            for k, (X_batch, Y_batch) in enumerate(datagen.flow(X_train, y_train, batch_size=paraman["--batch-size"])):
                 if paraman["network"] == "deepstrom":
                     feed_dict = {x: X_batch, y: Y_batch, subs: nys_subsample}
                 else:
@@ -269,8 +269,10 @@ def main(paraman, resman, printman):
                                                                                                 acc))
                     if paraman["--tensorboard"]:
                         summary_writer.add_summary(summary_str, j)
-                k += 1
                 j += 1
+                if k > int(data.train[0].shape[0] / paraman["--batch-size"]):
+                    break
+
 
         logger.info("Evaluation on validation data")
         training_time = t.time() - global_start
-- 
GitLab