From 3a0d97879bd3fd5d04367e383336b09b5dcf79b5 Mon Sep 17 00:00:00 2001
From: Jan
Date: Sat, 2 Jul 2022 15:50:28 -0600
Subject: [PATCH] adding variable number of dense layers and the simplest
 example consisting of one cell only

---
 fmda_kf_rnn.ipynb | 34 +++++++++++++++++++++++++++++-----
 1 file changed, 29 insertions(+), 5 deletions(-)

diff --git a/fmda_kf_rnn.ipynb b/fmda_kf_rnn.ipynb
index 679665b..c418b5d 100644
--- a/fmda_kf_rnn.ipynb
+++ b/fmda_kf_rnn.ipynb
@@ -1413,7 +1413,8 @@
    },
    "outputs": [],
    "source": [
-    "def create_RNN_2(hidden_units, dense_units, activation, stateful=False, batch_shape=None, input_shape=None):\n",
+    "def create_RNN_2(hidden_units, dense_units, activation, stateful=False, \n",
+    "                 batch_shape=None, input_shape=None, dense_layers=1):\n",
     "    if stateful:\n",
     "        inputs = tf.keras.Input(batch_shape=batch_shape)\n",
     "    else:\n",
@@ -1423,27 +1424,50 @@
     "    x = inputs\n",
     "    x = tf.keras.layers.SimpleRNN(hidden_units,activation=activation[0],stateful=stateful)(x)\n",
     "    # x = tf.keras.layers.Dense(hidden_units, activation=activation[1])(x)\n",
-    "    x = tf.keras.layers.Dense(dense_units, activation=activation[1])(x)\n",
+    "    for i in range(dense_layers):\n",
+    "        x = tf.keras.layers.Dense(dense_units, activation=activation[1])(x)\n",
     "    model = tf.keras.Model(inputs=inputs, outputs=x)\n",
     "    model.compile(loss='mean_squared_error', optimizer='adam')\n",
     "    return model\n",
     "def create_fit_predict_RNN(hidden_units, dense_units, \n",
-    "                           samples, timesteps, features, activation):\n",
+    "                           samples, timesteps, features, dense_layers=1, activation=['tanh', 'tanh']):\n",
     "    # statefull model version with with fixed number of batches\n",
     "    model_fit=create_RNN_2(hidden_units=hidden_units, dense_units=dense_units, \n",
     "                        batch_shape=(samples, timesteps, features),stateful = True,\n",
-    "                        activation=activation)\n",
+    "                        activation=activation,dense_layers=dense_layers)\n",
     "    print(model_fit.summary())\n",
     "    # same model for prediction on the entire dataset\n",
     "    model_predict=create_RNN_2(hidden_units=hidden_units, dense_units=dense_units, \n",
     "                        input_shape=(None,features),stateful = False,\n",
-    "                        activation=activation)\n",
+    "                        activation=activation,dense_layers=dense_layers)\n",
     "    print(model_predict.summary())\n",
     "    return model_fit, model_predict"
    ]
   },
   {
    "cell_type": "code",
+   "source": [
+    "# the simplest model possible\n",
+    "fmda_model, fmda_model_eval = create_fit_predict_RNN(hidden_units=1, dense_units=1, \n",
+    "                        samples=samples, timesteps=timesteps, features=1, dense_layers=0,\n",
+    "                        activation=['linear'])\n",
+    "fmda_model.fit(x_train, y_train, epochs=40, verbose=2,batch_size=samples)\n",
+    "# Same model as stateless for prediction:\n",
+    "w=fmda_model.get_weights()\n",
+    "fmda_model_eval.set_weights(w)\n",
+    "# prediction on the entire dataset from zero state\n",
+    "mt = fmda_model_eval.predict(Et)\n",
+    "m = scalery.inverse_transform(mt)\n",
+    "plot_m(m,title='RNN prediction')"
+   ],
+   "metadata": {
+    "id": "PZw7DNQD4Inr"
+   },
+   "execution_count": null,
+   "outputs": []
+  },
+  {
+   "cell_type": "code",
    "execution_count": null,
    "metadata": {
     "id": "R2jkoZlAIaSb"
-- 
2.11.4.GIT
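
Note (not part of the patch): a minimal usage sketch of the interface this commit exposes. It assumes the notebook variables samples, timesteps, x_train, y_train, Et, scalery, and plot_m are defined as in the one-cell example above; hidden_units=4 is an arbitrary illustrative choice.

# Sketch: stacking two Dense(1) layers on top of the SimpleRNN via the new
# dense_layers argument; dense_units=1 keeps the output a single scalar.
model_fit, model_predict = create_fit_predict_RNN(
    hidden_units=4, dense_units=1,
    samples=samples, timesteps=timesteps, features=1,
    dense_layers=2, activation=['tanh', 'tanh'])
model_fit.fit(x_train, y_train, epochs=40, verbose=2, batch_size=samples)
# copy trained weights into the stateless copy and predict on the whole series
model_predict.set_weights(model_fit.get_weights())
m = scalery.inverse_transform(model_predict.predict(Et))
plot_m(m, title='RNN prediction, two dense layers')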