Commit 4266b4af authored by Luc Giffon's avatar Luc Giffon
Browse files

first commit + commit du document de these

parents
results/*
reports/*
models/*
docs/*
data/*
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# DotEnv configuration
.env
# Database
*.db
*.rdb
# Pycharm
.idea
# VS Code
.vscode/
# Spyder
.spyproject/
# Jupyter NB Checkpoints
.ipynb_checkpoints/
# exclude data from source control by default
/data/
# Mac OS-specific storage files
.DS_Store
# vim
*.swp
*.swo
The MIT License (MIT)
Copyright (c) 2018, Luc Giffon
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Declare command-style targets as phony so a stray file with the same name
# (e.g. one literally called `clean`) can never shadow them. The dataset and
# model aliases below are commands too — their recipes never create a file
# with the target's name — so they belong here as well.
.PHONY: clean data lint requirements sync_data_to_s3 sync_data_from_s3 \
        models transforms create_environment test_environment \
        cifar10 cifar100 mnist svhn vgg19 lenet
#################################################################################
# GLOBALS                                                                       #
#################################################################################
# Absolute path of the directory containing this Makefile.
PROJECT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
# S3 bucket used by the sync_data_* targets; placeholder until configured.
BUCKET = [OPTIONAL] your-bucket-for-syncing-data (do not include 's3://')
# AWS CLI profile; when left as `default` the --profile flag is omitted.
PROFILE = default
PROJECT_NAME = deepstrom_network
PYTHON_INTERPRETER = python3
# Detect a conda installation once, at parse time (used by create_environment).
ifeq (,$(shell which conda))
HAS_CONDA=False
else
HAS_CONDA=True
endif
#################################################################################
# COMMANDS                                                                      #
#################################################################################
# Upgrade packaging tools first, then install the pinned project requirements.
# Depends on test_environment so we fail fast on a wrong interpreter.
## Install Python Dependencies
requirements: test_environment
$(PYTHON_INTERPRETER) -m pip install -U pip setuptools wheel
$(PYTHON_INTERPRETER) -m pip install -r requirements.txt
# Download every dataset into data/external in one shot.
## Make Dataset
data: requirements
$(PYTHON_INTERPRETER) src/data/make_dataset.py all data/external
# Per-dataset convenience aliases: each phony name points at the .npz file
# rule right below it, so the download only runs when the archive is missing.
cifar100: data/external/cifar100fine.npz
data/external/cifar100fine.npz:
$(PYTHON_INTERPRETER) src/data/make_dataset.py cifar100 data/external
cifar10: data/external/cifar10.npz
data/external/cifar10.npz:
$(PYTHON_INTERPRETER) src/data/make_dataset.py cifar10 data/external
mnist: data/external/mnist.npz
data/external/mnist.npz:
$(PYTHON_INTERPRETER) src/data/make_dataset.py mnist data/external
svhn: data/external/svhn.npz
data/external/svhn.npz:
$(PYTHON_INTERPRETER) src/data/make_dataset.py svhn data/external
# Pre-compute network activations ("transforms") used as features downstream.
# Each .npz depends on both the raw dataset alias and the pretrained-model
# alias; build_features.py arguments appear to be
# <dataset> <network> <layer> <output-name> <out-dir> — TODO confirm in script.
## Make Transforms
transforms: transform_vgg19 transform_lenet
transform_vgg19: transform_vgg19_cifar10_block5_pool transform_vgg19_svhn_block5_pool
transform_lenet: transform_lenet_mnist_conv_pool_2
transform_lenet_mnist_conv_pool_2: data/processed/lenet/conv_pool_2/mnist.npz
data/processed/lenet/conv_pool_2/mnist.npz: mnist lenet_mnist
$(PYTHON_INTERPRETER) src/features/build_features.py mnist lenet conv_pool_2 mnist data/processed
transform_vgg19_cifar10_block5_pool: data/processed/vgg19/block5_pool/cifar10.npz
data/processed/vgg19/block5_pool/cifar10.npz: cifar10 vgg19_cifar10
$(PYTHON_INTERPRETER) src/features/build_features.py cifar10 vgg19 block5_pool cifar10 data/processed
transform_vgg19_svhn_block5_pool: data/processed/vgg19/block5_pool/svhn.npz
data/processed/vgg19/block5_pool/svhn.npz: svhn vgg19_svhn
$(PYTHON_INTERPRETER) src/features/build_features.py svhn vgg19 block5_pool svhn data/processed
# Fetch pretrained models into models/external. `vgg19` grabs all vgg19
# variants at once; the per-variant aliases below point at the timestamped
# .h5 weight file so the download is skipped once it is already present.
## Download models
models: vgg19 lenet
vgg19:
$(PYTHON_INTERPRETER) src/models/download_model.py vgg19 all models/external
vgg19_cifar10: models/external/vgg19/cifar10/1544802301.9379897_vgg19_Cifar10Dataset.h5
models/external/vgg19/cifar10/1544802301.9379897_vgg19_Cifar10Dataset.h5:
$(PYTHON_INTERPRETER) src/models/download_model.py vgg19 cifar10 models/external
vgg19_cifar100: models/external/vgg19/cifar100/1530965727.781668_vgg19_cifar100fine.h5
models/external/vgg19/cifar100/1530965727.781668_vgg19_cifar100fine.h5:
$(PYTHON_INTERPRETER) src/models/download_model.py vgg19 cifar100 models/external
vgg19_svhn: models/external/vgg19/svhn/1529968150.5454917_vgg19_svhn.h5
models/external/vgg19/svhn/1529968150.5454917_vgg19_svhn.h5:
$(PYTHON_INTERPRETER) src/models/download_model.py vgg19 svhn models/external
# BUGFIX: the alias's prerequisite had a doubled leading "1"
# (11536244775.…) and so never matched the file rule below — running
# `make vgg19_siamese_omniglot_28x28` died with "No rule to make target".
# The prerequisite now matches the rule's target exactly.
vgg19_siamese_omniglot_28x28: models/external/vgg19/siamese_omniglot_28x28/1536244775.6502118_siamese_vgg19_omniglot_28x28_conv.h5
models/external/vgg19/siamese_omniglot_28x28/1536244775.6502118_siamese_vgg19_omniglot_28x28_conv.h5:
$(PYTHON_INTERPRETER) src/models/download_model.py vgg19 siamese_omniglot_28x28 models/external
vgg19_omniglot_28x28: models/external/vgg19/omniglot_28x28/1536764034.66037_vgg19_omniglot.h5
models/external/vgg19/omniglot_28x28/1536764034.66037_vgg19_omniglot.h5:
$(PYTHON_INTERPRETER) src/models/download_model.py vgg19 omniglot_28x28 models/external
# Same pattern as the vgg19 rules: `lenet` downloads every lenet variant;
# each alias below maps to the timestamped .h5 file that the download creates.
lenet:
$(PYTHON_INTERPRETER) src/models/download_model.py lenet all models/external
lenet_mnist: models/external/lenet/1524640419.938414_lenet_mnist.h5
models/external/lenet/1524640419.938414_lenet_mnist.h5:
$(PYTHON_INTERPRETER) src/models/download_model.py lenet mnist models/external
lenet_siamese_omniglot_28x28: models/external/lenet/1536239708.891906_siamese_lenet_omniglot_conv.h5
models/external/lenet/1536239708.891906_siamese_lenet_omniglot_conv.h5:
$(PYTHON_INTERPRETER) src/models/download_model.py lenet siamese_omniglot_28x28 models/external
lenet_omniglot_28x28: models/external/lenet/1536750152.6389275_lenet_omniglot_28x28.h5
models/external/lenet/1536750152.6389275_lenet_omniglot_28x28.h5:
$(PYTHON_INTERPRETER) src/models/download_model.py lenet omniglot_28x28 models/external
# Removes Python bytecode plus all downloaded/derived data and models.
# data/raw is deliberately spared (its rm line is commented out below).
## Delete all compiled Python files
clean:
find . -type f -name "*.py[co]" -delete
find . -type d -name "__pycache__" -delete
# rm -rf data/raw/*
rm -rf data/external/*
rm -rf data/processed/*
rm -rf models/external/*
## Lint using flake8
lint:
flake8 src
# Mirror the local data/ tree to s3://$(BUCKET)/data/ (and back, below).
# When PROFILE is `default` the --profile flag is omitted so the AWS CLI
# falls back to its own default credentials.
## Upload Data to S3
sync_data_to_s3:
ifeq (default,$(PROFILE))
aws s3 sync data/ s3://$(BUCKET)/data/
else
aws s3 sync data/ s3://$(BUCKET)/data/ --profile $(PROFILE)
endif
## Download Data from S3
sync_data_from_s3:
ifeq (default,$(PROFILE))
aws s3 sync s3://$(BUCKET)/data/ data/
else
aws s3 sync s3://$(BUCKET)/data/ data/ --profile $(PROFILE)
endif
# Creates a conda environment when conda was detected at parse time
# (HAS_CONDA, set in GLOBALS), otherwise falls back to virtualenv +
# virtualenvwrapper. The nested ifeq picks python=3 vs python=2.7 by
# whether the interpreter name contains "3".
# NOTE(review): the `\n` escapes in the echo messages rely on the shell's
# echo interpreting backslash escapes (true for dash/sh, not for bash's
# builtin echo without -e) — confirm on the target platform.
# BUGFIX: corrected user-facing typo "intalled" -> "installed".
## Set up python interpreter environment
create_environment:
ifeq (True,$(HAS_CONDA))
@echo ">>> Detected conda, creating conda environment."
ifeq (3,$(findstring 3,$(PYTHON_INTERPRETER)))
conda create --name $(PROJECT_NAME) python=3
else
conda create --name $(PROJECT_NAME) python=2.7
endif
@echo ">>> New conda env created. Activate with:\nsource activate $(PROJECT_NAME)"
else
$(PYTHON_INTERPRETER) -m pip install -q virtualenv virtualenvwrapper
@echo ">>> Installing virtualenvwrapper if not already installed.\nMake sure the following lines are in shell startup file\n\
export WORKON_HOME=$$HOME/.virtualenvs\nexport PROJECT_HOME=$$HOME/Devel\nsource /usr/local/bin/virtualenvwrapper.sh\n"
@bash -c "source `which virtualenvwrapper.sh`;mkvirtualenv $(PROJECT_NAME) --python=$(PYTHON_INTERPRETER)"
@echo ">>> New virtualenv created. Activate with:\nworkon $(PROJECT_NAME)"
endif
# Sanity-check the interpreter by running the project's checker script.
## Test python environment is setup correctly
test_environment:
$(PYTHON_INTERPRETER) test_environment.py
#################################################################################
# PROJECT RULES #
#################################################################################
#################################################################################
# Self Documenting Commands #
#################################################################################
# A bare `make` with no target prints the auto-generated help screen.
.DEFAULT_GOAL := help
# Inspired by <http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html>
# sed script explained:
# /^##/:
# * save line in hold space
# * purge line
# * Loop:
# * append newline + line to hold space
# * go to next line
# * if line starts with doc comment, strip comment character off and loop
# * remove target prerequisites
# * append hold space (+ newline) to line
# * replace newline plus comments by `---`
# * print line
# Separate expressions are necessary because labels cannot be delimited by
# semicolon; see <http://stackoverflow.com/a/11799865/1968>
.PHONY: help
# Collects every `## ` doc comment and the target name that follows it from
# this Makefile, sorts the entries case-insensitively, and pretty-prints
# the pairs with awk, word-wrapped to the terminal width (tput cols).
# NOTE: because the sed pairs each `## ` line with the very next line, no
# other comment may be inserted between a `## ` doc line and its target.
help:
@echo "$$(tput bold)Available rules:$$(tput sgr0)"
@echo
@sed -n -e "/^## / { \
h; \
s/.*//; \
:doc" \
-e "H; \
n; \
s/^## //; \
t doc" \
-e "s/:.*//; \
G; \
s/\\n## /---/; \
s/\\n/ /g; \
p; \
}" ${MAKEFILE_LIST} \
| LC_ALL='C' sort --ignore-case \
| awk -F '---' \
-v ncol=$$(tput cols) \
-v indent=19 \
-v col_on="$$(tput setaf 6)" \
-v col_off="$$(tput sgr0)" \
'{ \
printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
n = split($$2, words, " "); \
line_length = ncol - indent; \
for (i = 1; i <= n; i++) { \
line_length -= length(words[i]) + 1; \
if (line_length <= 0) { \
line_length = ncol - indent - length(words[i]) - 1; \
printf "\n%*s ", -indent, " "; \
} \
printf "%s ", words[i]; \
} \
printf "\n"; \
}' \
| more $(shell test $(shell uname) = Darwin && echo '--no-init --raw-control-chars')
deepstrom_network
==============================
Projet de recherche: Deepstrom Networks
Project Organization
------------
├── LICENSE
├── Makefile <- Makefile with commands like `make data` or `make train`
├── README.md <- The top-level README for developers using this project.
├── data
│   ├── external <- Data from third party sources.
│   ├── interim <- Intermediate data that has been transformed.
│   ├── processed <- The final, canonical data sets for modeling.
│   └── raw <- The original, immutable data dump.
├── docs <- A default Sphinx project; see sphinx-doc.org for details
├── models <- Trained and serialized models, model predictions, or model summaries
├── notebooks          <- Jupyter notebooks. Naming convention is a number (for ordering),
│                         the creator's initials, and a short `-` delimited description, e.g.
│                         `1.0-jqp-initial-data-exploration`.
├── references <- Data dictionaries, manuals, and all other explanatory materials.
├── reports <- Generated analysis as HTML, PDF, LaTeX, etc.
│   └── figures <- Generated graphics and figures to be used in reporting
├── requirements.txt   <- The requirements file for reproducing the analysis environment, e.g.
│                         generated with `pip freeze > requirements.txt`
├── setup.py <- makes project pip installable (pip install -e .) so src can be imported
├── src <- Source code for use in this project.
│   ├── __init__.py <- Makes src a Python module
│ │
│   ├── data <- Scripts to download or generate data
│   │   └── make_dataset.py
│ │
│   ├── features <- Scripts to turn raw data into features for modeling
│   │   └── build_features.py
│ │
│   ├── models <- Scripts to train models and then use trained models to make
│ │ │ predictions
│   │   ├── predict_model.py
│   │   └── train_model.py
│ │
│   └── visualization <- Scripts to create exploratory and results oriented visualizations
│   └── visualize.py
└── tox.ini <- tox file with settings for running tox; see tox.testrun.org
--------
<p><small>Project based on the <a target="_blank" href="https://drivendata.github.io/cookiecutter-data-science/">cookiecutter data science project template</a>. #cookiecutterdatascience</small></p>
dense -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -D 16
dense -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -D 64
dense -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -D 128
dense -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -D 256
dense -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -D 512
dense -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -D 16
dense -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -D 64
dense -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -D 128
dense -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -D 256
dense -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -D 512
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 8 -L
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 16 -L
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 32 -L
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 8 -L
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 16 -L
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 32 -L
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 8 -g 0.01 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 8 -g 0.05 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 8 -g 0.1 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 8 -g 0.5 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 16 -g 0.01 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 16 -g 0.05 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 16 -g 0.1 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 16 -g 0.5 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 32 -g 0.01 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 32 -g 0.05 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 32 -g 0.1 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 32 -g 0.5 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 8 -g 0.01 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 8 -g 0.05 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 8 -g 0.1 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 8 -g 0.5 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 16 -g 0.01 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 16 -g 0.05 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 16 -g 0.1 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 16 -g 0.5 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 32 -g 0.01 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 32 -g 0.05 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 32 -g 0.1 -R
deepstrom -e 200 -s 64 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 32 -g 0.5 -R
all:
dense:
deepstrom_no_gamma:
deepstrom_gamma:
base:
epoch_numbers: {"-e": [200]}
batch_sizes: {"-s": [64]}
val_size: {"-v": [10000]}
seed: {"-a": "range(1)"}
quiet: ["-q"]
dataset: ["--cifar10"]
dropout: {"-d": [0.5, 1.0]}
gamma:
gamma: {"-g": [0.01, 0.05, 0.1, 0.5]}
deepstrom:
network: ["deepstrom"]
base:
nys_size: {"-m": [8, 16, 32]}
deepstrom_no_gamma:
deepstrom:
kernel: ["-L"]
deepstrom_gamma:
deepstrom:
gamma:
kernel: ["-R"]
dense:
network: ["dense"]
base:
repr_dim: {"-D": [16, 64, 128, 256, 512]}
\ No newline at end of file
all:
dense:
base:
epoch_numbers: {"-e": [200]}
batch_sizes: {"-s": [64]}
val_size: {"-v": [10000]}
seed: {"-a": "range(1)"}
quiet: ["-q"]
dataset: ["--cifar10", "--svhn", "--mnist"]
dropout: {"-d": [0.5, 0.75, 1.0]}
dense:
network: ["dense"]
base:
repr_dim: {"-D": [16, 64, 128, 256]}
deepstrom -e 200 -s 128 -a 0 -q -m 8 -L
deepstrom -e 200 -s 128 -a 0 -q -m 16 -L
deepstrom -e 200 -s 128 -a 0 -q -m 32 -L
deepstrom -e 200 -s 128 -a 0 -q -m 8 -g 0.01 -R
deepstrom -e 200 -s 128 -a 0 -q -m 8 -g 0.05 -R
deepstrom -e 200 -s 128 -a 0 -q -m 8 -g 0.1 -R
deepstrom -e 200 -s 128 -a 0 -q -m 8 -g 0.5 -R
deepstrom -e 200 -s 128 -a 0 -q -m 16 -g 0.01 -R
deepstrom -e 200 -s 128 -a 0 -q -m 16 -g 0.05 -R
deepstrom -e 200 -s 128 -a 0 -q -m 16 -g 0.1 -R
deepstrom -e 200 -s 128 -a 0 -q -m 16 -g 0.5 -R
deepstrom -e 200 -s 128 -a 0 -q -m 32 -g 0.01 -R
deepstrom -e 200 -s 128 -a 0 -q -m 32 -g 0.05 -R
deepstrom -e 200 -s 128 -a 0 -q -m 32 -g 0.1 -R
deepstrom -e 200 -s 128 -a 0 -q -m 32 -g 0.5 -R
deepstrom -e 200 -s 128 -a 0 -q -m 64 -L
deepstrom -e 200 -s 128 -a 0 -q -m 64 -C
deepstrom -e 200 -s 128 -a 0 -q -m 128 -L
deepstrom -e 200 -s 128 -a 0 -q -m 128 -C
deepstrom -e 200 -s 128 -a 0 -q -m 256 -L
deepstrom -e 200 -s 128 -a 0 -q -m 256 -C
deepstrom -e 200 -s 128 -a 0 -q -m 64 -g 0.01 -R
deepstrom -e 200 -s 128 -a 0 -q -m 64 -g 0.05 -R
deepstrom -e 200 -s 128 -a 0 -q -m 64 -g 0.1 -R
deepstrom -e 200 -s 128 -a 0 -q -m 64 -g 0.5 -R
deepstrom -e 200 -s 128 -a 0 -q -m 128 -g 0.01 -R
deepstrom -e 200 -s 128 -a 0 -q -m 128 -g 0.05 -R
deepstrom -e 200 -s 128 -a 0 -q -m 128 -g 0.1 -R
deepstrom -e 200 -s 128 -a 0 -q -m 128 -g 0.5 -R
deepstrom -e 200 -s 128 -a 0 -q -m 256 -g 0.01 -R
deepstrom -e 200 -s 128 -a 0 -q -m 256 -g 0.05 -R
deepstrom -e 200 -s 128 -a 0 -q -m 256 -g 0.1 -R
deepstrom -e 200 -s 128 -a 0 -q -m 256 -g 0.5 -R
deepstrom -e 200 -s 128 -a 0 -q -m 8 -C
deepstrom -e 200 -s 128 -a 0 -q -m 16 -C
deepstrom -e 200 -s 128 -a 0 -q -m 32 -C
deepstrom -e 200 -s 128 -a 0 -q -m 64 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 0.5 -T -m 8 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 0.5 -T -m 16 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 0.5 -T -m 32 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 8 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 16 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 32 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 1.0 -T -m 8 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 1.0 -T -m 16 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 1.0 -T -m 32 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 8 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 16 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 32 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 0.5 -T -m 64 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 0.5 -T -m 128 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 64 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 0.5 -m 128 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 1.0 -T -m 64 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 1.0 -T -m 128 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 64 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --cifar10 -d 1.0 -m 128 -g '0.01;0.05;0.1;0.5'
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 8 -L
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 8 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 16 -L
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 16 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 32 -L
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 32 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 64 -L
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 64 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 128 -L
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 128 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 8 -L
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 8 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 16 -L
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 16 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 32 -L
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 32 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 64 -L
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 64 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 128 -L
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 128 -C
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 8 -g 0.01 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 8 -g 0.1 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 8 -g 1.0 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 16 -g 0.01 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 16 -g 0.1 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 16 -g 1.0 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 32 -g 0.01 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 32 -g 0.1 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 32 -g 1.0 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 64 -g 0.01 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 64 -g 0.1 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 64 -g 1.0 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 128 -g 0.01 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 128 -g 0.1 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --svhn -m 128 -g 1.0 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 8 -g 0.01 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 8 -g 0.1 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 8 -g 1.0 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 16 -g 0.01 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 16 -g 0.1 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 16 -g 1.0 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 32 -g 0.01 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 32 -g 0.1 -R
deepstrom -e 200 -s 128 -v 10000 -a 0 -q --mnist -m 32 -g 1.0 -R
deep