Skip to content

Instantly share code, notes, and snippets.

@ragulpr
Last active December 7, 2020 10:24

Revisions

  1. ragulpr revised this gist Mar 20, 2017. 1 changed file with 41 additions and 2 deletions.
    43 changes: 41 additions & 2 deletions py
    Original file line number Diff line number Diff line change
    @@ -55,6 +55,46 @@ np.random.seed(1)

    from keras.models import Sequential

    def sequential_non_temporal_example():
    model = Sequential()
    model.add(Masking(mask_value = mask_value,input_shape=(n_timesteps, n_features)))
    model.add(Dense(1,activation='linear',kernel_initializer="one"))
    model.summary()

    output_val = model.predict(data)

    print '--> time'
    print np.linspace(0,n_timesteps-1,n_timesteps)
    for s in xrange(n_samples):
    print '# sample = ',s
    print 'input:'
    print(data[s,:,:].T)
    print 'output_val:'
    print(output_val[s,:,:].T)

    def sequential_temporal_example():
    model = Sequential()
    model.add(Masking(mask_value = mask_value,input_shape=(n_timesteps, n_features)))
    model.add(LSTM(2, return_sequences=True, kernel_initializer="one"))
    model.add(Dense(1,activation='linear',kernel_initializer="one"))

    model.summary()

    output_val = model.predict(data)

    import keras
    from keras.layers import *
    from keras.models import Model
    import theano as T
    import tensorflow as tf
    print('theano ver.',T.__version__)
    print('tensorflow ver.',tf.__version__)
    print('keras ver.',keras.__version__)
    np.set_printoptions(precision=4)
    np.random.seed(1)

    from keras.models import Sequential

    def sequential_non_temporal_example():
    model = Sequential()
    model.add(Masking(mask_value = mask_value,input_shape=(n_timesteps, n_features)))
    @@ -95,7 +135,7 @@ def sequential_temporal_example():
    n_samples = 3
    n_timesteps = 7
    n_features = 2
    mask_value = np.nan # -1 # 0.0
    mask_value = np.nan # -999999999.0# -1.0 # -1 # 0.0
    data = np.ones((n_samples,n_timesteps,n_features))

    for s in xrange(n_samples):
    @@ -114,7 +154,6 @@ print '####################### sequential_temporal_example #####################
    # As non-temporal but masked timestep state does not propagate through time:
    sequential_temporal_example()


    # ('theano ver.', '0.8.2')
    # ('tensorflow ver.', '1.0.1')
    # ('keras ver.', '2.0.1')
  2. ragulpr revised this gist Mar 20, 2017. 1 changed file with 47 additions and 2 deletions.
    49 changes: 47 additions & 2 deletions py
    Original file line number Diff line number Diff line change
    @@ -11,6 +11,50 @@ np.random.seed(1)
    # had same results with both tf and T
    from keras.models import Sequential

    def sequential_non_temporal_example():
    model = Sequential()
    model.add(Masking(mask_value = mask_value,input_shape=(n_timesteps, n_features)))
    model.add(Dense(1,activation='linear',kernel_initializer="one"))
    model.summary()

    output_val = model.predict(data)

    print '--> time'
    print np.linspace(0,n_timesteps-1,n_timesteps)
    for s in xrange(n_samples):
    print '# sample = ',s
    print 'input:'
    print(data[s,:,:].T)
    print 'output_val:'
    print(output_val[s,:,:].T)

    def sequential_temporal_example():
    model = Sequential()
    model.add(Masking(mask_value = mask_value,input_shape=(n_timesteps, n_features)))
    model.add(LSTM(2, return_sequences=True, kernel_initializer="one"))
    model.add(Dense(1,activation='linear',kernel_initializer="one"))

    model.summary()

    output_val = model.predict(data)

    print '--> time'
    print np.linspace(0,n_timesteps-1,n_timesteps)
    for s in xrange(n_samples):
    print '# sample = ',s
    import keras
    from keras.layers import *
    from keras.models import Model
    import theano as T
    import tensorflow as tf
    print('theano ver.',T.__version__)
    print('tensorflow ver.',tf.__version__)
    print('keras ver.',keras.__version__)
    np.set_printoptions(precision=4)
    np.random.seed(1)

    from keras.models import Sequential

    def sequential_non_temporal_example():
    model = Sequential()
    model.add(Masking(mask_value = mask_value,input_shape=(n_timesteps, n_features)))
    @@ -51,7 +95,7 @@ def sequential_temporal_example():
    n_samples = 3
    n_timesteps = 7
    n_features = 2
    mask_value = np.nan
    mask_value = np.nan # -1 # 0.0
    data = np.ones((n_samples,n_timesteps,n_features))

    for s in xrange(n_samples):
    @@ -61,7 +105,7 @@ for s in xrange(n_samples):
    # mask a feature value of one sample and timestep (no effect)
    data[1,0,0] = mask_value

    # mask all feature values of one sample and timestep (propagates 0 at layer of step/sample)
    # mask all feature values of one sample and timestep (propagates 0.*mask_value at layer of step/sample?)
    data[2,3,:] = mask_value

    print '####################### sequential_non_temporal_example #######################:'
    @@ -70,6 +114,7 @@ print '####################### sequential_temporal_example #####################
    # As non-temporal but masked timestep state does not propagate through time:
    sequential_temporal_example()


    # ('theano ver.', '0.8.2')
    # ('tensorflow ver.', '1.0.1')
    # ('keras ver.', '2.0.1')
  3. ragulpr revised this gist Mar 20, 2017. 1 changed file with 19 additions and 19 deletions.
    38 changes: 19 additions & 19 deletions py
    Original file line number Diff line number Diff line change
    @@ -8,7 +8,7 @@ print('tensorflow ver.',tf.__version__)
    print('keras ver.',keras.__version__)
    np.set_printoptions(precision=4)
    np.random.seed(1)

    # had same results with both tf and T
    from keras.models import Sequential

    def sequential_non_temporal_example():
    @@ -51,7 +51,7 @@ def sequential_temporal_example():
    n_samples = 3
    n_timesteps = 7
    n_features = 2
    mask_value = 0#np.nan
    mask_value = np.nan
    data = np.ones((n_samples,n_timesteps,n_features))

    for s in xrange(n_samples):
    @@ -77,9 +77,9 @@ sequential_temporal_example()
    # _________________________________________________________________
    # Layer (type) Output Shape Param #
    # =================================================================
    # masking_63 (Masking) (None, 7, 2) 0
    # masking_1 (Masking) (None, 7, 2) 0
    # _________________________________________________________________
    # dense_14 (Dense) (None, 7, 1) 3
    # dense_1 (Dense) (None, 7, 1) 3
    # =================================================================
    # Total params: 3.0
    # Trainable params: 3.0
    @@ -95,25 +95,25 @@ sequential_temporal_example()
    # [[ 2. 4. 6. 8. 10. 12. 14.]]
    # # sample = 1
    # input:
    # [[ 0. 2. 3. 4. 5. 6. 7.]
    # [ 1. 2. 3. 4. 5. 6. 7.]]
    # [[ nan 2. 3. 4. 5. 6. 7.]
    # [ 1. 2. 3. 4. 5. 6. 7.]]
    # output_val:
    # [[ 1. 4. 6. 8. 10. 12. 14.]]
    # [[ nan 4. 6. 8. 10. 12. 14.]]
    # # sample = 2
    # input:
    # [[ 1. 2. 3. 0. 5. 6. 7.]
    # [ 1. 2. 3. 0. 5. 6. 7.]]
    # [[ 1. 2. 3. nan 5. 6. 7.]
    # [ 1. 2. 3. nan 5. 6. 7.]]
    # output_val:
    # [[ 2. 4. 6. 0. 10. 12. 14.]]
    # [[ 2. 4. 6. nan 10. 12. 14.]]
    # ####################### sequential_temporal_example #######################:
    # _________________________________________________________________
    # Layer (type) Output Shape Param #
    # =================================================================
    # masking_64 (Masking) (None, 7, 2) 0
    # masking_2 (Masking) (None, 7, 2) 0
    # _________________________________________________________________
    # lstm_122 (LSTM) (None, 7, 2) 40
    # lstm_1 (LSTM) (None, 7, 2) 40
    # _________________________________________________________________
    # dense_15 (Dense) (None, 7, 1) 3
    # dense_2 (Dense) (None, 7, 1) 3
    # =================================================================
    # Total params: 43.0
    # Trainable params: 43.0
    @@ -129,13 +129,13 @@ sequential_temporal_example()
    # [[ 1.2603 1.9066 1.9871 1.9982 1.9998 2. 2. ]]
    # # sample = 1
    # input:
    # [[ 0. 2. 3. 4. 5. 6. 7.]
    # [ 1. 2. 3. 4. 5. 6. 7.]]
    # [[ nan 2. 3. 4. 5. 6. 7.]
    # [ 1. 2. 3. 4. 5. 6. 7.]]
    # output_val:
    # [[ 0.6829 1.8217 1.9749 1.9966 1.9995 1.9999 2. ]]
    # [[ nan nan nan nan nan nan nan]]
    # # sample = 2
    # input:
    # [[ 1. 2. 3. 0. 5. 6. 7.]
    # [ 1. 2. 3. 0. 5. 6. 7.]]
    # [[ 1. 2. 3. nan 5. 6. 7.]
    # [ 1. 2. 3. nan 5. 6. 7.]]
    # output_val:
    # [[ 1.2603 1.9066 1.9871 1.9871 1.9982 1.9998 2. ]]
    # [[ 1.2603 1.9066 1.9871 nan nan nan nan]]
  4. ragulpr created this gist Mar 20, 2017.
    141 changes: 141 additions & 0 deletions py
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,141 @@
    import numpy as np
    import theano as T
    import tensorflow as tf
    import keras
    from keras.layers import *
    from keras.models import Model
    print('theano ver.',T.__version__)
    print('tensorflow ver.',tf.__version__)
    print('keras ver.',keras.__version__)
    np.set_printoptions(precision=4)
    np.random.seed(1)

    from keras.models import Sequential

    def sequential_non_temporal_example():
        """Build a Masking -> Dense model and print its per-timestep output.

        Reads the module-level globals ``mask_value``, ``n_timesteps``,
        ``n_features``, ``n_samples`` and ``data``.  Dense is applied
        independently at each timestep, so masking has no temporal effect
        here: a masked timestep only affects its own output value.
        """
        model = Sequential()
        # Masking flags timesteps whose feature values all equal mask_value.
        model.add(Masking(mask_value=mask_value, input_shape=(n_timesteps, n_features)))
        # kernel_initializer="one" makes the output deterministic: the
        # linear unit simply sums the features of each timestep.
        model.add(Dense(1, activation='linear', kernel_initializer="one"))
        model.summary()

        output_val = model.predict(data)

        print('--> time')
        print(np.linspace(0, n_timesteps - 1, n_timesteps))
        for s in range(n_samples):
            print('# sample = ', s)
            print('input:')
            print(data[s, :, :].T)
            print('output_val:')
            print(output_val[s, :, :].T)

    def sequential_temporal_example():
        """Build a Masking -> LSTM -> Dense model and print per-timestep output.

        Reads the module-level globals ``mask_value``, ``n_timesteps``,
        ``n_features``, ``n_samples`` and ``data``.  Unlike the non-temporal
        example, the LSTM carries state across timesteps, so a masked
        timestep is skipped and the previous state is carried forward.
        """
        model = Sequential()
        # Masking flags timesteps whose feature values all equal mask_value.
        model.add(Masking(mask_value=mask_value, input_shape=(n_timesteps, n_features)))
        # return_sequences=True keeps one output per timestep so the effect
        # of the mask on each step can be inspected.
        model.add(LSTM(2, return_sequences=True, kernel_initializer="one"))
        model.add(Dense(1, activation='linear', kernel_initializer="one"))

        model.summary()

        output_val = model.predict(data)

        print('--> time')
        print(np.linspace(0, n_timesteps - 1, n_timesteps))
        for s in range(n_samples):
            print('# sample = ', s)
            print('input:')
            print(data[s, :, :].T)
            print('output_val:')
            print(output_val[s, :, :].T)


    # --- Demo driver: build a small ramp dataset and compare how masking
    # --- behaves in a non-temporal (Dense) vs. temporal (LSTM) model.
    n_samples = 3
    n_timesteps = 7
    n_features = 2
    mask_value = 0  # np.nan  — set to NaN to watch NaNs propagate instead
    data = np.ones((n_samples, n_timesteps, n_features))

    # Every sample/feature gets the same ramp 1..n_timesteps so expected
    # outputs are easy to read off.
    for s in range(n_samples):
        for f in range(n_features):
            data[s, :, f] = np.linspace(1, n_timesteps, n_timesteps)

    # Mask a single feature value of one sample and timestep (no effect:
    # Masking only triggers when ALL features equal mask_value).
    data[1, 0, 0] = mask_value

    # Mask all feature values of one sample and timestep (propagates 0 at
    # the output of that step/sample in the non-temporal case).
    data[2, 3, :] = mask_value

    print('####################### sequential_non_temporal_example #######################:')
    sequential_non_temporal_example()
    print('####################### sequential_temporal_example #######################:')
    # As non-temporal, but the masked timestep's state does not propagate
    # through time: the LSTM skips it and carries the previous state.
    sequential_temporal_example()

    # ('theano ver.', '0.8.2')
    # ('tensorflow ver.', '1.0.1')
    # ('keras ver.', '2.0.1')
    # ####################### sequential_non_temporal_example #######################:
    # _________________________________________________________________
    # Layer (type) Output Shape Param #
    # =================================================================
    # masking_63 (Masking) (None, 7, 2) 0
    # _________________________________________________________________
    # dense_14 (Dense) (None, 7, 1) 3
    # =================================================================
    # Total params: 3.0
    # Trainable params: 3.0
    # Non-trainable params: 0.0
    # _________________________________________________________________
    # --> time
    # [ 0. 1. 2. 3. 4. 5. 6.]
    # # sample = 0
    # input:
    # [[ 1. 2. 3. 4. 5. 6. 7.]
    # [ 1. 2. 3. 4. 5. 6. 7.]]
    # output_val:
    # [[ 2. 4. 6. 8. 10. 12. 14.]]
    # # sample = 1
    # input:
    # [[ 0. 2. 3. 4. 5. 6. 7.]
    # [ 1. 2. 3. 4. 5. 6. 7.]]
    # output_val:
    # [[ 1. 4. 6. 8. 10. 12. 14.]]
    # # sample = 2
    # input:
    # [[ 1. 2. 3. 0. 5. 6. 7.]
    # [ 1. 2. 3. 0. 5. 6. 7.]]
    # output_val:
    # [[ 2. 4. 6. 0. 10. 12. 14.]]
    # ####################### sequential_temporal_example #######################:
    # _________________________________________________________________
    # Layer (type) Output Shape Param #
    # =================================================================
    # masking_64 (Masking) (None, 7, 2) 0
    # _________________________________________________________________
    # lstm_122 (LSTM) (None, 7, 2) 40
    # _________________________________________________________________
    # dense_15 (Dense) (None, 7, 1) 3
    # =================================================================
    # Total params: 43.0
    # Trainable params: 43.0
    # Non-trainable params: 0.0
    # _________________________________________________________________
    # --> time
    # [ 0. 1. 2. 3. 4. 5. 6.]
    # # sample = 0
    # input:
    # [[ 1. 2. 3. 4. 5. 6. 7.]
    # [ 1. 2. 3. 4. 5. 6. 7.]]
    # output_val:
    # [[ 1.2603 1.9066 1.9871 1.9982 1.9998 2. 2. ]]
    # # sample = 1
    # input:
    # [[ 0. 2. 3. 4. 5. 6. 7.]
    # [ 1. 2. 3. 4. 5. 6. 7.]]
    # output_val:
    # [[ 0.6829 1.8217 1.9749 1.9966 1.9995 1.9999 2. ]]
    # # sample = 2
    # input:
    # [[ 1. 2. 3. 0. 5. 6. 7.]
    # [ 1. 2. 3. 0. 5. 6. 7.]]
    # output_val:
    # [[ 1.2603 1.9066 1.9871 1.9871 1.9982 1.9998 2. ]]