mihaelacr / hyperopt_exampleMNIST.py (last active August 29, 2015)
Changes to the pull request to solve some of the problems with it.
# -*- coding: utf-8 -*-
"""
This module optimizes the hyper-parameters of a DBN using the hyperopt library.
Check out the library here: https://github.com/hyperopt/hyperopt.

Example run:
    python hyperopt_exampleMNIST.py --trainSize=10000 --path=...

REFERENCES
[1] Bergstra, James, Dan Yamins, and David D. Cox. "Hyperopt: A Python library
    for optimizing the hyperparameters of machine learning algorithms." (2013).
"""
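
# Minimal, self-contained sketch of the hyperopt fmin/tpe/hp API that the DBN
# optimization in this module builds on.  This is an illustrative addition: the
# 'learningRate' key and the quadratic objective are assumptions, standing in
# for the real objective that would train a DBN and return its validation error.
import numpy as np
from hyperopt import fmin, hp, tpe


def _hyperoptToyExample():
    # Toy objective with its minimum at learningRate = 0.01.
    def objective(params):
        return (params['learningRate'] - 0.01) ** 2

    # hp.loguniform samples exp(uniform(low, high)), so this searches learning
    # rates between roughly 1e-4 and 1e-1.
    space = {'learningRate': hp.loguniform('learningRate', np.log(1e-4), np.log(1e-1))}

    # TPE is the sequential model-based optimization algorithm described in [1].
    return fmin(fn=objective, space=space, algo=tpe.suggest, max_evals=20)
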

# These layer classes match pylearn2's maxout/mlp modules
# (https://github.com/lisa-lab/pylearn2); the import below is an assumption
# about where this fragment gets them from.
from pylearn2.models import maxout, mlp


def MNISTmain():
    # TODO: max_col_norm
    h0 = maxout.Maxout(layer_name='h0', num_units=1200, num_pieces=2, W_lr_scale=1.0, irange=0.005, b_lr_scale=1.0)
    h1 = maxout.Maxout(layer_name='h1', num_units=1200, num_pieces=2, W_lr_scale=1.0, irange=0.005, b_lr_scale=1.0)
    # h2 = maxout.Maxout(layer_name='h2', num_units=1200, num_pieces=2, W_lr_scale=1.0, irange=0.005, b_lr_scale=1.0)
    outlayer = mlp.Softmax(layer_name='y', n_classes=10, irange=0)

    # layers = [h0, h1, h2, outlayer]
    layers = [h0, h1, outlayer]

# The aim of this file is to check the theory that the identical softmax
# outputs are caused by the same input.
import numpy as np

a = np.array([[-0., -0., -0., -1., -0., -0., -0., -0., -0., -0.],
              [-0., -0., -0., -1., -0., -0., -0., -0., -0., -0.],
              [-0., -0., -0., -0., -0., -0., -0., -0., -1., -0.],
              [-0., -1., -0., -0., -0., -0., -0., -0., -0., -0.],
              [-0., -0., -0., -1., -0., -0., -0., -0., -0., -0.]])  # remaining rows truncated in the gist
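
# A small illustrative addition (not part of the original gist): one way to run
# the check described above is to count how often each distinct output row
# appears; any duplicated rows would then be traced back to the inputs that
# produced them.
uniqueRows, counts = np.unique(a, axis=0, return_counts=True)
for row, count in zip(uniqueRows, counts):
    print(count, row)
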
import numpy as np
import restrictedBoltzmannMachine as rbm
import theano
from theano import tensor as T
theanoFloat = theano.config.floatX
"""In all the above topLayer does not mean the top most layer, but rather the
layer above the current one."""