Commit 4a92670e authored by Luc Giffon
Modify the build of P so that it works with multiple dimensions.

parent bc0cbecf
@@ -42,7 +42,7 @@ def max_pool_2x2(x):
                           strides=[1, 2, 2, 1], padding='SAME')
 
 
-def convolution(input):
+def convolution_mnist(input):
     with tf.name_scope("conv_pool_1"):
         # 32 is the number of filters we'll use, i.e. the number of different
         # shapes this layer is able to recognize
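The hunk only shows the rename and the filter-count comment, but a minimal sketch of what such a conv_pool_1 block typically looks like may help, reusing the file's weight_variable, bias_variable and max_pool_2x2 helpers; the 5x5 kernel size and the single block shown here are assumptions, only the count of 32 filters appears in the diff:

def convolution_mnist(input):
    with tf.name_scope("conv_pool_1"):
        # 32 filters over the single grayscale channel; the 5x5 kernel size is
        # an assumption, only the filter count appears in the diff
        W_conv1 = weight_variable([5, 5, 1, 32])
        b_conv1 = bias_variable([32])
        h_conv1 = tf.nn.relu(
            tf.nn.conv2d(input, W_conv1, strides=[1, 1, 1, 1], padding='SAME')
            + b_conv1)
        # max_pool_2x2 (defined above in the file) halves the spatial size, e.g. 28x28 -> 14x14
        h_pool1 = max_pool_2x2(h_conv1)
    return h_pool1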
@@ -118,7 +118,7 @@ def P_variable(d, nbr_stack):
     :type nbr_stack: int
     :return: tf.Variable object containing the matrix
     """
-    idx = [(i * d) + np.random.permutation(d) for i in range(nbr_stack)]
+    idx = np.hstack([(i * d) + np.random.permutation(d) for i in range(nbr_stack)])
     P = np.random.permutation(np.eye(N=nbr_stack * d))[idx].astype(np.float32)
     return tf.Variable(P, name="P", trainable=False)
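The hstack is the substance of the commit: the old index was a Python list of nbr_stack arrays of length d, which fancy-indexes np.eye(nbr_stack * d) as a 2-D index and yields a 3-D array of shape (nbr_stack, d, nbr_stack * d) instead of a square permutation matrix. Flattening the index restores a (nbr_stack * d, nbr_stack * d) matrix for any nbr_stack. A minimal numpy check, using small illustrative values for d and nbr_stack:

import numpy as np

d, nbr_stack = 4, 3

# Old construction: a list of nbr_stack permutations, one per block of size d.
idx_old = [(i * d) + np.random.permutation(d) for i in range(nbr_stack)]
# The list behaves as a 2-D integer index, so the result is not square.
print(np.eye(N=nbr_stack * d)[np.array(idx_old)].shape)  # (3, 4, 12)

# New construction: one flat index of length nbr_stack * d.
idx_new = np.hstack(idx_old)
P = np.random.permutation(np.eye(N=nbr_stack * d))[idx_new]
print(P.shape)  # (12, 12)
# Still a valid permutation matrix: exactly one 1 per row and per column.
print((P.sum(axis=0) == 1).all() and (P.sum(axis=1) == 1).all())  # True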
@@ -259,21 +259,29 @@ def fully_connected(conv_out):
     return h_fc1
 
 
+def mnist_dims():
+    input_dim = int(mnist.train.images.shape[1])
+    output_dim = int(mnist.train.labels.shape[1])
+    return input_dim, output_dim
+
+
 if __name__ == '__main__':
     SIGMA = 5.0
     print("Sigma = {}".format(SIGMA))
 
     with tf.Graph().as_default():
-        input_dim = int(mnist.train.images.shape[1])
-        output_dim = int(mnist.train.labels.shape[1])
-        side_size = int(np.sqrt(input_dim))
+        # todo parametrize dataset
+        input_dim, output_dim = mnist_dims()
 
         x = tf.placeholder(tf.float32, shape=[None, input_dim], name="x")
         y_ = tf.placeholder(tf.float32, shape=[None, output_dim], name="labels")
+        # side size is the width or height of the images
+        side_size = int(np.sqrt(input_dim))
         x_image = tf.reshape(x, [-1, side_size, side_size, 1])
         tf.summary.image("digit", x_image, max_outputs=3)
 
         # Representation layer
-        h_conv = convolution(x_image)
+        h_conv = convolution_mnist(x_image)
         # h_conv = x
         # out_fc = fully_connected(h_conv)  # 95% accuracy
         # out_fc = tf.nn.relu(fast_food(h_conv, SIGMA, nbr_stack=1))  # 83% accuracy (conv) | 56% accuracy (noconv)
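For the MNIST dataset loaded with one_hot=True (assumed here, as in the TensorFlow MNIST tutorial), mnist.train.images has shape (55000, 784) and mnist.train.labels has shape (55000, 10), so the refactored lines resolve to:

input_dim, output_dim = mnist_dims()  # 784, 10
side_size = int(np.sqrt(input_dim))   # 28: MNIST digits are 28 x 28 pixels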
@@ -288,8 +296,8 @@ if __name__ == '__main__':
         keep_prob = tf.placeholder(tf.float32, name="keep_prob")
         h_fc1_drop = tf.nn.dropout(out_fc, keep_prob)
 
         dim = np.prod([s.value for s in h_fc1_drop.shape if s.value is not None])
-        W_fc2 = weight_variable([dim, 10])
-        b_fc2 = bias_variable([10])
+        W_fc2 = weight_variable([dim, output_dim])
+        b_fc2 = bias_variable([output_dim])
         tf.summary.histogram("weights", W_fc2)
         tf.summary.histogram("biases", b_fc2)