import tensorflow as tf

# Training Parameters
learning_rate = 0.01
num_steps = 30000
batch_size = 10000
display_step = 1000
examples_to_show = 10
# Network Parameters
num_input = len(FEATURE_COLUMNS)  # number of selected features (FEATURE_COLUMNS is assumed to be defined earlier)
num_hidden_1 = int(num_input/2 + 1) # 1st layer num features
num_hidden_2 = int(num_hidden_1/2 + 1) # 2nd layer num features (the latent dim)
# tf Graph input
X = tf.placeholder("float", [None, num_input])
weights = {
    'encoder_h1': tf.Variable(tf.random_normal([num_input, num_hidden_1])),
    'encoder_h2': tf.Variable(tf.random_normal([num_hidden_1, num_hidden_2])),
    'decoder_h1': tf.Variable(tf.random_normal([num_hidden_2, num_hidden_1])),
    'decoder_h2': tf.Variable(tf.random_normal([num_hidden_1, num_input])),
}
biases = {
    'encoder_b1': tf.Variable(tf.random_normal([num_hidden_1])),
    'encoder_b2': tf.Variable(tf.random_normal([num_hidden_2])),
    'decoder_b1': tf.Variable(tf.random_normal([num_hidden_1])),
    'decoder_b2': tf.Variable(tf.random_normal([num_input])),
}
# Building the encoder
def encoder(x):
    # Encoder hidden layer with sigmoid activation #1
    layer_1 = tf.nn.sigmoid(tf.add(tf.matmul(x, weights['encoder_h1']),
                                   biases['encoder_b1']))
    # Encoder hidden layer with sigmoid activation #2 (the latent representation)
    layer_2 = tf.nn.sigmoid(tf.add(tf.matmul(layer_1, weights['encoder_h2']),
                                   biases['encoder_b2']))
    return layer_2
# Building the decoder
def decoder(x):
    # Decoder hidden layer with sigmoid activation #1
    layer_1 = tf.nn.sigmoid(tf.add(tf.matmul(x, weights['decoder_h1']),
                                   biases['decoder_b1']))
    # Decoder hidden layer with sigmoid activation #2 (the reconstruction)
    layer_2 = tf.nn.sigmoid(tf.add(tf.matmul(layer_1, weights['decoder_h2']),
                                   biases['decoder_b2']))
    return layer_2
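
# A minimal sketch of how these pieces can be wired into a full autoencoder:
# reconstruct the input, minimize mean squared error, and run a standard
# TF 1.x training loop. The helper `next_batch(batch_size)` is an assumption
# standing in for whatever pipeline feeds batches of scaled feature rows.

# Construct the model: encode, then decode back to the input space
encoder_op = encoder(X)
decoder_op = decoder(encoder_op)

# The reconstruction is compared against the original input
y_pred = decoder_op
y_true = X

# Mean squared reconstruction error, minimized with RMSProp
loss = tf.reduce_mean(tf.pow(y_true - y_pred, 2))
optimizer = tf.train.RMSPropOptimizer(learning_rate).minimize(loss)

# Training loop
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    for step in range(1, num_steps + 1):
        batch_x = next_batch(batch_size)  # assumed to return a [batch_size, num_input] array
        _, l = sess.run([optimizer, loss], feed_dict={X: batch_x})
        if step % display_step == 0 or step == 1:
            print('Step %i: Minibatch Loss: %f' % (step, l))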